Skip to content

Commit db20e15

Browse files
committed
Support responses api
1 parent cad4b87 commit db20e15

File tree

3 files changed

+143
-16
lines changed

3 files changed

+143
-16
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -163,3 +163,4 @@ chroma.sqlite3
163163
#.idea/
164164

165165
logs/
166+
playground/

src/langtrace_python_sdk/instrumentation/openai/instrumentation.py

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,8 @@
2424
async_embeddings_create,
2525
async_images_generate,
2626
chat_completions_create,
27+
openai_responses_create,
28+
async_openai_responses_create,
2729
embeddings_create,
2830
images_edit,
2931
images_generate,
@@ -32,7 +34,7 @@
3234
logging.basicConfig(level=logging.FATAL)
3335

3436

35-
class OpenAIInstrumentation(BaseInstrumentor): # type: ignore
37+
class OpenAIInstrumentation(BaseInstrumentor): # type: ignore
3638

3739
def instrumentation_dependencies(self) -> Collection[str]:
3840
return ["openai >= 0.27.0", "trace-attributes >= 4.0.5"]
@@ -54,6 +56,18 @@ def _instrument(self, **kwargs: Any) -> None:
5456
async_chat_completions_create(version, tracer),
5557
)
5658

59+
wrap_function_wrapper(
60+
"openai.resources.responses",
61+
"AsyncResponses.create",
62+
async_openai_responses_create(version, tracer),
63+
)
64+
65+
wrap_function_wrapper(
66+
"openai.resources.responses",
67+
"Responses.create",
68+
openai_responses_create(version, tracer),
69+
)
70+
5771
wrap_function_wrapper(
5872
"openai.resources.images",
5973
"Images.generate",

src/langtrace_python_sdk/instrumentation/openai/patch.py

Lines changed: 127 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -7,27 +7,120 @@
77
from opentelemetry.trace.propagation import set_span_in_context
88
from opentelemetry.trace.status import Status, StatusCode
99

10-
from langtrace_python_sdk.constants.instrumentation.common import \
11-
SERVICE_PROVIDERS
10+
from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
1211
from langtrace_python_sdk.constants.instrumentation.openai import APIS
1312
from langtrace_python_sdk.instrumentation.openai.types import (
14-
ChatCompletionsCreateKwargs, ContentItem, EmbeddingsCreateKwargs,
15-
ImagesEditKwargs, ImagesGenerateKwargs, ResultType)
13+
ChatCompletionsCreateKwargs,
14+
ContentItem,
15+
EmbeddingsCreateKwargs,
16+
ImagesEditKwargs,
17+
ImagesGenerateKwargs,
18+
ResultType,
19+
)
1620
from langtrace_python_sdk.types import NOT_GIVEN
1721
from langtrace_python_sdk.utils import set_span_attribute
18-
from langtrace_python_sdk.utils.llm import (StreamWrapper,
19-
calculate_prompt_tokens,
20-
get_base_url, get_extra_attributes,
21-
get_langtrace_attributes,
22-
get_llm_request_attributes,
23-
get_llm_url, get_span_name,
24-
get_tool_calls, is_streaming,
25-
set_event_completion,
26-
set_span_attributes,
27-
set_usage_attributes)
22+
from langtrace_python_sdk.utils.llm import (
23+
StreamWrapper,
24+
calculate_prompt_tokens,
25+
get_base_url,
26+
get_extra_attributes,
27+
get_langtrace_attributes,
28+
get_llm_request_attributes,
29+
get_llm_url,
30+
get_span_name,
31+
get_tool_calls,
32+
is_streaming,
33+
set_event_completion,
34+
set_span_attributes,
35+
set_usage_attributes,
36+
)
2837
from langtrace_python_sdk.utils.silently_fail import silently_fail
2938

3039

40+
def async_openai_responses_create(version: str, tracer: Tracer) -> Callable:
    """Return an async wrapper tracing `openai.resources.responses.AsyncResponses.create`.

    Args:
        version: Instrumented ``openai`` package version, recorded on the span.
        tracer: OpenTelemetry tracer used to create the client span.

    Returns:
        An async wrapt-style wrapper ``(wrapped, instance, args, kwargs)`` that
        records request/response attributes and re-raises any error.
    """

    async def traced_method(
        wrapped: Callable, instance: Any, args: List[Any], kwargs: Dict[str, Any]
    ):
        input_value = kwargs.get("input")
        # The Responses API accepts either a plain string or a list of input
        # items; normalize both shapes to a list for the `prompts` attribute.
        # (The original took only input_value[0] for lists, which dropped
        # messages and produced a bare dict instead of a list.)
        prompt = (
            input_value
            if isinstance(input_value, list)
            else [{"role": "user", "content": input_value}]
        )
        service_provider = SERVICE_PROVIDERS["OPENAI"]
        span_attributes = {
            "instructions": kwargs.get("instructions"),
            **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
            **get_llm_request_attributes(
                kwargs,
                operation_name="openai.responses.create",
                prompts=prompt,
            ),
        }
        with tracer.start_as_current_span(
            name="openai.responses.create",
            kind=SpanKind.CLIENT,
            context=set_span_in_context(trace.get_current_span()),
        ) as span:
            try:
                set_span_attributes(span, span_attributes)

                response = await wrapped(*args, **kwargs)
                _set_openai_agentic_response_attributes(span, response)

                return response
            except Exception as err:
                # Record the failure and mark the span as errored before
                # propagating so the trace reflects the exception.
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                raise

    return traced_method
79+
80+
81+
def openai_responses_create(version: str, tracer: Tracer) -> Callable:
    """Return a sync wrapper tracing `openai.resources.responses.Responses.create`.

    Args:
        version: Instrumented ``openai`` package version, recorded on the span.
        tracer: OpenTelemetry tracer used to create the client span.

    Returns:
        A wrapt-style wrapper ``(wrapped, instance, args, kwargs)`` that
        records request/response attributes and re-raises any error.
    """

    def traced_method(
        wrapped: Callable, instance: Any, args: List[Any], kwargs: Dict[str, Any]
    ):
        input_value = kwargs.get("input")
        # The Responses API accepts either a plain string or a list of input
        # items; normalize both shapes to a list for the `prompts` attribute.
        # (The original took only input_value[0] for lists, which dropped
        # messages and produced a bare dict instead of a list.)
        prompt = (
            input_value
            if isinstance(input_value, list)
            else [{"role": "user", "content": input_value}]
        )
        service_provider = SERVICE_PROVIDERS["OPENAI"]
        span_attributes = {
            "instructions": kwargs.get("instructions"),
            **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
            **get_llm_request_attributes(
                kwargs,
                operation_name="openai.responses.create",
                prompts=prompt,
            ),
        }
        with tracer.start_as_current_span(
            name="openai.responses.create",
            kind=SpanKind.CLIENT,
            context=set_span_in_context(trace.get_current_span()),
        ) as span:
            try:
                set_span_attributes(span, span_attributes)

                response = wrapped(*args, **kwargs)
                # NOTE: removed leftover debug `print("3. Response", response)`.
                _set_openai_agentic_response_attributes(span, response)

                return response
            except Exception as err:
                # Record the failure and mark the span as errored before
                # propagating so the trace reflects the exception.
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                raise

    return traced_method
122+
123+
31124
def filter_valid_attributes(attributes):
32125
"""Filter attributes where value is not None, not an empty string, and not openai.NOT_GIVEN."""
33126
return {
@@ -634,6 +727,21 @@ def extract_content(choice: Any) -> Union[str, List[Dict[str, Any]], Dict[str, A
634727
return ""
635728

636729

730+
def _set_openai_agentic_response_attributes(span: Span, response) -> None:
    """Record Responses API result metadata (id, model, completion, usage) on *span*.

    Args:
        span: The active span for the ``openai.responses.create`` call.
        response: The SDK ``Response`` object returned by the client.
    """
    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_ID, response.id)
    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, response.model)
    set_event_completion(span, [{"role": "assistant", "content": response.output_text}])

    usage = getattr(response, "usage", None)
    if usage is None:
        # No usage payload (e.g. streaming partials) — nothing more to record.
        return

    # `input_tokens_details` is a typed object (attribute access) in current
    # openai SDKs; the original subscripted it like a dict, which raises
    # TypeError. Support both shapes defensively.
    details = getattr(usage, "input_tokens_details", None)
    if isinstance(details, dict):
        cached_tokens = details.get("cached_tokens", 0)
    else:
        cached_tokens = getattr(details, "cached_tokens", 0) if details else 0

    set_usage_attributes(
        span,
        {
            "input_tokens": usage.input_tokens,
            "output_tokens": usage.output_tokens,
            "total_tokens": usage.total_tokens,
            "cached_tokens": cached_tokens,
        },
    )
743+
744+
637745
@silently_fail
638746
def _set_input_attributes(
639747
span: Span, kwargs: ChatCompletionsCreateKwargs, attributes: LLMSpanAttributes
@@ -707,5 +815,9 @@ def _set_response_attributes(span: Span, result: ResultType) -> None:
707815
set_span_attribute(
708816
span,
709817
"gen_ai.usage.cached_tokens",
710-
result.usage.prompt_tokens_details.cached_tokens if result.usage.prompt_tokens_details else 0,
818+
(
819+
result.usage.prompt_tokens_details.cached_tokens
820+
if result.usage.prompt_tokens_details
821+
else 0
822+
),
711823
)

0 commit comments

Comments
 (0)