|  | 
| 7 | 7 | from opentelemetry.trace.propagation import set_span_in_context | 
| 8 | 8 | from opentelemetry.trace.status import Status, StatusCode | 
| 9 | 9 | 
 | 
| 10 |  | -from langtrace_python_sdk.constants.instrumentation.common import \ | 
| 11 |  | -    SERVICE_PROVIDERS | 
|  | 10 | +from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS | 
| 12 | 11 | from langtrace_python_sdk.constants.instrumentation.openai import APIS | 
| 13 | 12 | from langtrace_python_sdk.instrumentation.openai.types import ( | 
| 14 |  | -    ChatCompletionsCreateKwargs, ContentItem, EmbeddingsCreateKwargs, | 
| 15 |  | -    ImagesEditKwargs, ImagesGenerateKwargs, ResultType) | 
|  | 13 | +    ChatCompletionsCreateKwargs, | 
|  | 14 | +    ContentItem, | 
|  | 15 | +    EmbeddingsCreateKwargs, | 
|  | 16 | +    ImagesEditKwargs, | 
|  | 17 | +    ImagesGenerateKwargs, | 
|  | 18 | +    ResultType, | 
|  | 19 | +) | 
| 16 | 20 | from langtrace_python_sdk.types import NOT_GIVEN | 
| 17 | 21 | from langtrace_python_sdk.utils import set_span_attribute | 
| 18 |  | -from langtrace_python_sdk.utils.llm import (StreamWrapper, | 
| 19 |  | -                                            calculate_prompt_tokens, | 
| 20 |  | -                                            get_base_url, get_extra_attributes, | 
| 21 |  | -                                            get_langtrace_attributes, | 
| 22 |  | -                                            get_llm_request_attributes, | 
| 23 |  | -                                            get_llm_url, get_span_name, | 
| 24 |  | -                                            get_tool_calls, is_streaming, | 
| 25 |  | -                                            set_event_completion, | 
| 26 |  | -                                            set_span_attributes, | 
| 27 |  | -                                            set_usage_attributes) | 
|  | 22 | +from langtrace_python_sdk.utils.llm import ( | 
|  | 23 | +    StreamWrapper, | 
|  | 24 | +    calculate_prompt_tokens, | 
|  | 25 | +    get_base_url, | 
|  | 26 | +    get_extra_attributes, | 
|  | 27 | +    get_langtrace_attributes, | 
|  | 28 | +    get_llm_request_attributes, | 
|  | 29 | +    get_llm_url, | 
|  | 30 | +    get_span_name, | 
|  | 31 | +    get_tool_calls, | 
|  | 32 | +    is_streaming, | 
|  | 33 | +    set_event_completion, | 
|  | 34 | +    set_span_attributes, | 
|  | 35 | +    set_usage_attributes, | 
|  | 36 | +) | 
| 28 | 37 | from langtrace_python_sdk.utils.silently_fail import silently_fail | 
| 29 | 38 | 
 | 
| 30 | 39 | 
 | 
def async_openai_responses_create(version: str, tracer: Tracer) -> Callable:
    """Return an async wrapper that traces `openai.responses.create` (async client).

    Args:
        version: Instrumented SDK version, recorded as a span attribute.
        tracer: OpenTelemetry tracer used to create the client span.

    Returns:
        An async `traced_method(wrapped, instance, args, kwargs)` suitable for
        use with wrapt-style instrumentation.
    """

    async def traced_method(
        wrapped: Callable, instance: Any, args: List[Any], kwargs: Dict[str, Any]
    ):
        input_value = kwargs.get("input")
        # Normalize the prompt to a list of message dicts. A plain string is
        # wrapped as a single user message; a list is passed through whole.
        # (The original took only input_value[0], which dropped every message
        # after the first and produced a bare dict where the other branch
        # produces a list.)
        prompt = (
            input_value
            if isinstance(input_value, list)
            else [{"role": "user", "content": input_value}]
        )
        service_provider = SERVICE_PROVIDERS["OPENAI"]
        span_attributes = {
            "instructions": kwargs.get("instructions"),
            **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
            **get_llm_request_attributes(
                kwargs,
                operation_name="openai.responses.create",
                prompts=prompt,
            ),
        }
        with tracer.start_as_current_span(
            name="openai.responses.create",
            kind=SpanKind.CLIENT,
            context=set_span_in_context(trace.get_current_span()),
        ) as span:
            try:
                set_span_attributes(span, span_attributes)

                response = await wrapped(*args, **kwargs)
                _set_openai_agentic_response_attributes(span, response)

                return response
            except Exception as err:
                # Record the failure and mark the span as errored before
                # propagating, so traces distinguish failed calls.
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                raise

    return traced_method
|  | 79 | + | 
|  | 80 | + | 
def openai_responses_create(version: str, tracer: Tracer) -> Callable:
    """Return a sync wrapper that traces `openai.responses.create`.

    Args:
        version: Instrumented SDK version, recorded as a span attribute.
        tracer: OpenTelemetry tracer used to create the client span.

    Returns:
        A `traced_method(wrapped, instance, args, kwargs)` suitable for use
        with wrapt-style instrumentation.
    """

    def traced_method(
        wrapped: Callable, instance: Any, args: List[Any], kwargs: Dict[str, Any]
    ):
        input_value = kwargs.get("input")
        # Normalize the prompt to a list of message dicts. A plain string is
        # wrapped as a single user message; a list is passed through whole.
        # (The original took only input_value[0], which dropped every message
        # after the first and produced a bare dict where the other branch
        # produces a list.)
        prompt = (
            input_value
            if isinstance(input_value, list)
            else [{"role": "user", "content": input_value}]
        )
        service_provider = SERVICE_PROVIDERS["OPENAI"]
        span_attributes = {
            "instructions": kwargs.get("instructions"),
            **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
            **get_llm_request_attributes(
                kwargs,
                operation_name="openai.responses.create",
                prompts=prompt,
            ),
        }
        with tracer.start_as_current_span(
            name="openai.responses.create",
            kind=SpanKind.CLIENT,
            context=set_span_in_context(trace.get_current_span()),
        ) as span:
            try:
                set_span_attributes(span, span_attributes)

                response = wrapped(*args, **kwargs)
                _set_openai_agentic_response_attributes(span, response)

                # NOTE: removed leftover debug print("3. Response", response).
                return response
            except Exception as err:
                # Record the failure and mark the span as errored before
                # propagating, so traces distinguish failed calls.
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                raise

    return traced_method
|  | 122 | + | 
|  | 123 | + | 
| 31 | 124 | def filter_valid_attributes(attributes): | 
| 32 | 125 |     """Filter attributes where value is not None, not an empty string, and not openai.NOT_GIVEN.""" | 
| 33 | 126 |     return { | 
| @@ -634,6 +727,21 @@ def extract_content(choice: Any) -> Union[str, List[Dict[str, Any]], Dict[str, A | 
| 634 | 727 |         return "" | 
| 635 | 728 | 
 | 
| 636 | 729 | 
 | 
def _set_openai_agentic_response_attributes(span: Span, response) -> None:
    """Record id, model, completion text, and token usage from a Responses-API
    result onto *span*.

    Args:
        span: The active span for the `openai.responses.create` call.
        response: An OpenAI Responses-API response object (assumed to expose
            `id`, `model`, `output_text`, and optionally `usage`).
    """
    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_ID, response.id)
    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, response.model)
    set_event_completion(span, [{"role": "assistant", "content": response.output_text}])

    usage = getattr(response, "usage", None)
    if usage is None:
        # Some responses (e.g. streamed/partial results) carry no usage block;
        # skip usage attributes rather than raising.
        return

    # The OpenAI SDK exposes input_tokens_details as a model object with a
    # `cached_tokens` attribute (cf. prompt_tokens_details.cached_tokens used
    # elsewhere in this file). The original subscripted it like a dict, which
    # raises TypeError on the model object — accept both shapes defensively.
    details = getattr(usage, "input_tokens_details", None)
    if isinstance(details, dict):
        cached_tokens = details.get("cached_tokens", 0)
    else:
        cached_tokens = getattr(details, "cached_tokens", 0) if details else 0

    set_usage_attributes(
        span,
        {
            "input_tokens": usage.input_tokens,
            "output_tokens": usage.output_tokens,
            "total_tokens": usage.total_tokens,
            "cached_tokens": cached_tokens,
        },
    )
|  | 743 | + | 
|  | 744 | + | 
| 637 | 745 | @silently_fail | 
| 638 | 746 | def _set_input_attributes( | 
| 639 | 747 |     span: Span, kwargs: ChatCompletionsCreateKwargs, attributes: LLMSpanAttributes | 
| @@ -707,5 +815,9 @@ def _set_response_attributes(span: Span, result: ResultType) -> None: | 
| 707 | 815 |             set_span_attribute( | 
| 708 | 816 |                 span, | 
| 709 | 817 |                 "gen_ai.usage.cached_tokens", | 
| 710 |  | -                result.usage.prompt_tokens_details.cached_tokens if result.usage.prompt_tokens_details else 0, | 
|  | 818 | +                ( | 
|  | 819 | +                    result.usage.prompt_tokens_details.cached_tokens | 
|  | 820 | +                    if result.usage.prompt_tokens_details | 
|  | 821 | +                    else 0 | 
|  | 822 | +                ), | 
| 711 | 823 |             ) | 
0 commit comments