Skip to content

Commit 48ccbf5

Browse files
authored
Merge pull request #505 from Scale3-Labs/release-3.8.7
Release `3.8.7`
2 parents cad4b87 + 72e325a commit 48ccbf5

File tree

5 files changed

+210
-63
lines changed

5 files changed

+210
-63
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -163,3 +163,4 @@ chroma.sqlite3
163163
#.idea/
164164

165165
logs/
166+
playground/

src/langtrace_python_sdk/instrumentation/openai/instrumentation.py

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,8 @@
2424
async_embeddings_create,
2525
async_images_generate,
2626
chat_completions_create,
27+
openai_responses_create,
28+
async_openai_responses_create,
2729
embeddings_create,
2830
images_edit,
2931
images_generate,
@@ -32,7 +34,7 @@
3234
logging.basicConfig(level=logging.FATAL)
3335

3436

35-
class OpenAIInstrumentation(BaseInstrumentor): # type: ignore
37+
class OpenAIInstrumentation(BaseInstrumentor): # type: ignore
3638

3739
def instrumentation_dependencies(self) -> Collection[str]:
3840
return ["openai >= 0.27.0", "trace-attributes >= 4.0.5"]
@@ -54,6 +56,18 @@ def _instrument(self, **kwargs: Any) -> None:
5456
async_chat_completions_create(version, tracer),
5557
)
5658

59+
wrap_function_wrapper(
60+
"openai.resources.responses",
61+
"AsyncResponses.create",
62+
async_openai_responses_create(version, tracer),
63+
)
64+
65+
wrap_function_wrapper(
66+
"openai.resources.responses",
67+
"Responses.create",
68+
openai_responses_create(version, tracer),
69+
)
70+
5771
wrap_function_wrapper(
5872
"openai.resources.images",
5973
"Images.generate",

src/langtrace_python_sdk/instrumentation/openai/patch.py

Lines changed: 128 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -7,27 +7,121 @@
77
from opentelemetry.trace.propagation import set_span_in_context
88
from opentelemetry.trace.status import Status, StatusCode
99

10-
from langtrace_python_sdk.constants.instrumentation.common import \
11-
SERVICE_PROVIDERS
10+
from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
1211
from langtrace_python_sdk.constants.instrumentation.openai import APIS
1312
from langtrace_python_sdk.instrumentation.openai.types import (
14-
ChatCompletionsCreateKwargs, ContentItem, EmbeddingsCreateKwargs,
15-
ImagesEditKwargs, ImagesGenerateKwargs, ResultType)
13+
ChatCompletionsCreateKwargs,
14+
ContentItem,
15+
EmbeddingsCreateKwargs,
16+
ImagesEditKwargs,
17+
ImagesGenerateKwargs,
18+
ResultType,
19+
)
1620
from langtrace_python_sdk.types import NOT_GIVEN
1721
from langtrace_python_sdk.utils import set_span_attribute
18-
from langtrace_python_sdk.utils.llm import (StreamWrapper,
19-
calculate_prompt_tokens,
20-
get_base_url, get_extra_attributes,
21-
get_langtrace_attributes,
22-
get_llm_request_attributes,
23-
get_llm_url, get_span_name,
24-
get_tool_calls, is_streaming,
25-
set_event_completion,
26-
set_span_attributes,
27-
set_usage_attributes)
22+
from langtrace_python_sdk.utils.llm import (
23+
StreamWrapper,
24+
calculate_prompt_tokens,
25+
get_base_url,
26+
get_extra_attributes,
27+
get_langtrace_attributes,
28+
get_llm_request_attributes,
29+
get_llm_url,
30+
get_span_name,
31+
get_tool_calls,
32+
is_streaming,
33+
set_event_completion,
34+
set_span_attributes,
35+
set_usage_attributes,
36+
)
2837
from langtrace_python_sdk.utils.silently_fail import silently_fail
2938

3039

40+
def async_openai_responses_create(version: str, tracer: Tracer) -> Callable:
    """Wrap `openai.resources.responses.AsyncResponses.create` to trace it.

    Args:
        version: Installed ``openai`` package version, recorded on the span.
        tracer: OpenTelemetry tracer used to create the client span.

    Returns:
        A wrapt-style ``traced_method(wrapped, instance, args, kwargs)``
        coroutine wrapper that re-raises any exception from the wrapped call.
    """

    async def traced_method(
        wrapped: Callable, instance: Any, args: List[Any], kwargs: Dict[str, Any]
    ):
        input_value = kwargs.get("input")
        # `prompts` must be a list of message dicts. A plain-string `input`
        # is wrapped as a single user message; a list `input` is passed
        # through whole (previously only input_value[0] was kept, which
        # dropped every message after the first and produced a bare dict
        # instead of a list).
        prompt = (
            input_value
            if isinstance(input_value, list)
            else [{"role": "user", "content": input_value}]
        )
        service_provider = SERVICE_PROVIDERS["OPENAI"]
        span_attributes = {
            "instructions": kwargs.get("instructions"),
            **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
            **get_llm_request_attributes(
                kwargs,
                operation_name="openai.responses.create",
                prompts=prompt,
            ),
        }
        with tracer.start_as_current_span(
            name="openai.responses.create",
            kind=SpanKind.CLIENT,
            context=set_span_in_context(trace.get_current_span()),
        ) as span:
            try:
                set_span_attributes(span, span_attributes)

                response = await wrapped(*args, **kwargs)
                # NOTE(review): unlike the sync wrapper, streaming responses
                # are not wrapped here — confirm whether AsyncResponses.create
                # with stream=True needs StreamWrapper handling.
                _set_openai_agentic_response_attributes(span, response)

                return response
            except Exception as err:
                # Record the failure on the span before propagating it.
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                raise

    return traced_method
79+
80+
81+
def openai_responses_create(version: str, tracer: Tracer) -> Callable:
    """Wrap `openai.resources.responses.Responses.create` to trace it.

    Args:
        version: Installed ``openai`` package version, recorded on the span.
        tracer: OpenTelemetry tracer used to create the client span.

    Returns:
        A wrapt-style ``traced_method(wrapped, instance, args, kwargs)``
        wrapper. Streaming responses are returned wrapped in
        ``StreamWrapper``, which takes ownership of the span and ends it
        when the stream is exhausted.
    """

    def traced_method(
        wrapped: Callable, instance: Any, args: List[Any], kwargs: Dict[str, Any]
    ):
        input_value = kwargs.get("input")
        # `prompts` must be a list of message dicts. A plain-string `input`
        # is wrapped as a single user message; a list `input` is passed
        # through whole (previously only input_value[0] was kept, which
        # dropped every message after the first and produced a bare dict
        # instead of a list).
        prompt = (
            input_value
            if isinstance(input_value, list)
            else [{"role": "user", "content": input_value}]
        )
        service_provider = SERVICE_PROVIDERS["OPENAI"]
        span_attributes = {
            "instructions": kwargs.get("instructions"),
            **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
            **get_llm_request_attributes(
                kwargs,
                operation_name="openai.responses.create",
                prompts=prompt,
            ),
        }
        # end_on_exit=False because a streaming response outlives this
        # function: StreamWrapper ends the span when the stream finishes.
        # Every non-streaming path below must therefore end the span
        # explicitly (previously neither the non-streaming branch nor the
        # exception branch ended it, leaking an open span per call).
        with tracer.start_as_current_span(
            name="openai.responses.create",
            kind=SpanKind.CLIENT,
            context=set_span_in_context(trace.get_current_span()),
            end_on_exit=False,
        ) as span:
            try:
                set_span_attributes(span, span_attributes)

                response = wrapped(*args, **kwargs)
                if is_streaming(kwargs) and span.is_recording():
                    # Span ownership transfers to the wrapper.
                    return StreamWrapper(response, span)
                _set_openai_agentic_response_attributes(span, response)
                span.end()
                return response
            except Exception as err:
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                span.end()
                raise

    return traced_method
123+
124+
31125
def filter_valid_attributes(attributes):
32126
"""Filter attributes where value is not None, not an empty string, and not openai.NOT_GIVEN."""
33127
return {
@@ -634,6 +728,21 @@ def extract_content(choice: Any) -> Union[str, List[Dict[str, Any]], Dict[str, A
634728
return ""
635729

636730

731+
def _set_openai_agentic_response_attributes(span: Span, response) -> None:
    """Record id, model, completion text, and token usage from a Responses
    API result onto *span*.

    Args:
        span: The span being populated.
        response: An ``openai`` Responses API result object; expected to
            expose ``id``, ``model``, ``output_text`` and (optionally)
            ``usage``.
    """
    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_ID, response.id)
    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, response.model)
    set_event_completion(
        span, [{"role": "assistant", "content": response.output_text}]
    )
    usage = response.usage
    if usage is None:
        # Nothing to record (e.g. usage omitted by the API).
        return
    # In the OpenAI SDK, `input_tokens_details` is a pydantic model
    # (InputTokensDetails) — subscripting it (the previous
    # `input_tokens_details["cached_tokens"]`) raises TypeError. Use
    # attribute access, matching `_set_response_attributes`'s
    # `prompt_tokens_details.cached_tokens`, with a dict fallback and a
    # default of 0 when details are absent.
    details = getattr(usage, "input_tokens_details", None)
    if isinstance(details, dict):
        cached_tokens = details.get("cached_tokens", 0)
    else:
        cached_tokens = getattr(details, "cached_tokens", 0) if details else 0
    set_usage_attributes(
        span,
        {
            "input_tokens": usage.input_tokens,
            "output_tokens": usage.output_tokens,
            "total_tokens": usage.total_tokens,
            "cached_tokens": cached_tokens,
        },
    )
744+
745+
637746
@silently_fail
638747
def _set_input_attributes(
639748
span: Span, kwargs: ChatCompletionsCreateKwargs, attributes: LLMSpanAttributes
@@ -707,5 +816,9 @@ def _set_response_attributes(span: Span, result: ResultType) -> None:
707816
set_span_attribute(
708817
span,
709818
"gen_ai.usage.cached_tokens",
710-
result.usage.prompt_tokens_details.cached_tokens if result.usage.prompt_tokens_details else 0,
819+
(
820+
result.usage.prompt_tokens_details.cached_tokens
821+
if result.usage.prompt_tokens_details
822+
else 0
823+
),
711824
)

0 commit comments

Comments
 (0)