Commit 8d3b769

support genai and also add token reporting
1 parent 587494e commit 8d3b769

File tree: 7 files changed (+85, -32 lines)

src/examples/langchain_example/__init__.py

Lines changed: 6 additions & 4 deletions
@@ -1,3 +1,4 @@
+from examples.langchain_example.langchain_google_genai import basic_google_genai
 from .basic import basic_app, rag, load_and_split
 from langtrace_python_sdk import with_langtrace_root_span

@@ -8,10 +9,11 @@
 class LangChainRunner:
     @with_langtrace_root_span("LangChain")
     def run(self):
-        basic_app()
-        rag()
-        load_and_split()
-        basic_graph_tools()
+        # basic_app()
+        # rag()
+        # load_and_split()
+        # basic_graph_tools()
+        basic_google_genai()


 class GroqRunner:
src/examples/langchain_example/langchain_google_genai.py

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
+from langchain_core.messages import HumanMessage
+from langchain_google_genai import ChatGoogleGenerativeAI
+from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+
+@with_langtrace_root_span("basic_google_genai")
+def basic_google_genai():
+    llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash")
+    # example
+    message = HumanMessage(
+        content=[
+            {
+                "type": "text",
+                "text": "What's in this image?",
+            },
+        ]
+    )
+    message_image = HumanMessage(content="https://picsum.photos/seed/picsum/200/300")
+
+    res = llm.invoke([message, message_image])
+    # print(res)
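
Note: a minimal sketch of how this new example might be run on its own, assuming GOOGLE_API_KEY is set so ChatGoogleGenerativeAI can authenticate, and that init() from src/langtrace_python_sdk/langtrace.py is the intended entry point (the snippet is illustrative, not part of this commit):

# Minimal sketch, not part of this commit. Assumes GOOGLE_API_KEY is exported
# and that langtrace.init() wires up the instrumentations registered in
# src/langtrace_python_sdk/langtrace.py.
from langtrace_python_sdk import langtrace
from examples.langchain_example.langchain_google_genai import basic_google_genai

langtrace.init()
basic_google_genai()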

src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py

Lines changed: 2 additions & 7 deletions
@@ -81,21 +81,16 @@ def _instrument(self, **kwargs):
         tracer_provider = kwargs.get("tracer_provider")
         tracer = get_tracer(__name__, "", tracer_provider)
         version = importlib.metadata.version("langchain")
-
         wrap_function_wrapper(
             "langchain.agents.agent",
             "RunnableAgent.plan",
-            generic_patch(
-                "RunnableAgent.plan", "plan", tracer, version, True, True
-            ),
+            generic_patch("RunnableAgent.plan", "plan", tracer, version, True, True),
         )

         wrap_function_wrapper(
             "langchain.agents.agent",
             "RunnableAgent.aplan",
-            generic_patch(
-                "RunnableAgent.aplan", "plan", tracer, version, True, True
-            ),
+            generic_patch("RunnableAgent.aplan", "plan", tracer, version, True, True),
         )

         # modules_to_patch = []

src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py

Lines changed: 14 additions & 0 deletions
@@ -134,6 +134,20 @@ def _instrument(self, **kwargs):
         ]

         modules_to_patch = [
+            (
+                "langchain_core.language_models.chat_models",
+                "chatmodel",
+                generic_patch,
+                True,
+                True,
+            ),
+            (
+                "langchain_core.language_models.base",
+                "language_model",
+                generic_patch,
+                True,
+                True,
+            ),
             ("langchain_core.retrievers", "retriever", generic_patch, True, True),
             ("langchain_core.prompts.chat", "prompt", generic_patch, True, True),
             (

src/langtrace_python_sdk/instrumentation/langchain_core/patch.py

Lines changed: 38 additions & 17 deletions
@@ -57,7 +57,24 @@ def traced_method(wrapped, instance, args, kwargs):
             "langtrace.service.version": version,
             "langtrace.version": v(LANGTRACE_SDK_NAME),
             "langchain.task.name": task,
-            **(extra_attributes if extra_attributes is not None else {}),
+            "gen_ai.request.model": (
+                instance.model if hasattr(instance, "model") else None
+            ),
+            SpanAttributes.LLM_REQUEST_MAX_TOKENS: (
+                instance.max_output_tokens
+                if hasattr(instance, "max_output_tokens")
+                else None
+            ),
+            SpanAttributes.LLM_TOP_K: (
+                instance.top_k if hasattr(instance, "top_k") else None
+            ),
+            SpanAttributes.LLM_REQUEST_TOP_P: (
+                instance.top_p if hasattr(instance, "top_p") else None
+            ),
+            SpanAttributes.LLM_REQUEST_TEMPERATURE: (
+                instance.temperature if hasattr(instance, "temperature") else None
+            ),
+            **(extra_attributes if extra_attributes is not None else {}),  # type: ignore
         }

         if trace_input and len(args) > 0:
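
Note: the request attributes above are read defensively with hasattr(), so chat models that expose them get reported while everything else records None. An illustrative mapping against the example chat model added in this commit (the parameter values are made up, not taken from the diff):

# Illustrative only: chat-model constructor parameters that the hasattr()
# lookups above would pick up. Values are arbitrary examples.
from langchain_google_genai import ChatGoogleGenerativeAI

llm = ChatGoogleGenerativeAI(
    model="gemini-1.5-flash",  # -> "gen_ai.request.model"
    temperature=0.7,           # -> SpanAttributes.LLM_REQUEST_TEMPERATURE
    top_p=0.9,                 # -> SpanAttributes.LLM_REQUEST_TOP_P
    top_k=40,                  # -> SpanAttributes.LLM_TOP_K
    max_output_tokens=256,     # -> SpanAttributes.LLM_REQUEST_MAX_TOKENS
)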
@@ -79,21 +96,17 @@ def traced_method(wrapped, instance, args, kwargs):
         try:
             # Attempt to call the original method
             result = wrapped(*args, **kwargs)
-
             if trace_output:
                 span.set_attribute("langchain.outputs", to_json_string(result))
-                if hasattr(result, 'usage'):
-                    prompt_tokens = result.usage.prompt_tokens
-                    completion_tokens = result.usage.completion_tokens
-                    span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, prompt_tokens)
-                    span.set_attribute(SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, completion_tokens)
-                elif hasattr(result, 'generations') and len(result.generations) > 0 and len(result.generations[0]) > 0 and hasattr(result.generations[0][0], 'text') and isinstance(result.generations[0][0].text, str):
-                    span.set_attribute(SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, instance.get_num_tokens(result.generations[0][0].text))
-                elif len(args) > 0 and len(args[0]) > 0 and not hasattr(args[0][0], 'text') and hasattr(instance, 'get_num_tokens'):
-                    span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, instance.get_num_tokens(args[0][0]))
-                elif len(args) > 0 and len(args[0]) > 0 and hasattr(args[0][0], 'text') and isinstance(args[0][0].text, str) and hasattr(instance, 'get_num_tokens'):
-                    span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, instance.get_num_tokens(args[0][0].text))
-
+                if hasattr(result, "usage_metadata"):
+                    span.set_attribute(
+                        SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+                        result.usage_metadata["input_tokens"],
+                    )
+                    span.set_attribute(
+                        SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+                        result.usage_metadata["output_tokens"],
+                    )
             span.set_status(StatusCode.OK)
             return result
         except Exception as err:
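
Note: token reporting now reads LangChain's usage_metadata field off the result instead of re-counting tokens with get_num_tokens(). A rough sketch of the shape this code expects (the numbers are illustrative):

# Illustrative shape of the usage_metadata dict read above; the numbers are
# made up. LangChain chat results (e.g. AIMessage) carry this field when the
# provider returns token usage.
usage_metadata = {
    "input_tokens": 12,    # -> SpanAttributes.LLM_USAGE_PROMPT_TOKENS
    "output_tokens": 34,   # -> SpanAttributes.LLM_USAGE_COMPLETION_TOKENS
    "total_tokens": 46,    # not recorded by this patch
}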
@@ -208,9 +221,17 @@ def clean_empty(d):
     if not isinstance(d, (dict, list, tuple)):
         return d
     if isinstance(d, tuple):
-        return tuple(val for val in (clean_empty(val) for val in d) if val != () and val is not None)
+        return tuple(
+            val
+            for val in (clean_empty(val) for val in d)
+            if val != () and val is not None
+        )
     if isinstance(d, list):
-        return [val for val in (clean_empty(val) for val in d) if val != [] and val is not None]
+        return [
+            val
+            for val in (clean_empty(val) for val in d)
+            if val != [] and val is not None
+        ]
     result = {}
     for k, val in d.items():
         if isinstance(val, dict):
@@ -226,7 +247,7 @@ def clean_empty(d):
             result[k] = val.strip()
         elif isinstance(val, object):
             # some langchain objects have a text attribute
-            val = getattr(val, 'text', None)
+            val = getattr(val, "text", None)
             if val is not None and val.strip() != "":
                 result[k] = val.strip()
     return result

src/langtrace_python_sdk/langtrace.py

Lines changed: 3 additions & 3 deletions
@@ -128,8 +128,8 @@ def init(
         "embedchain": EmbedchainInstrumentation(),
         "qdrant-client": QdrantInstrumentation(),
         "langchain": LangchainInstrumentation(),
-        "langchain-core": LangchainCoreInstrumentation(),
-        "langchain-community": LangchainCommunityInstrumentation(),
+        "langchain_core": LangchainCoreInstrumentation(),
+        "langchain_community": LangchainCommunityInstrumentation(),
         "langgraph": LanggraphInstrumentation(),
         "anthropic": AnthropicInstrumentation(),
         "cohere": CohereInstrumentation(),
@@ -190,7 +190,7 @@ def init(


 def init_instrumentations(
-    disable_instrumentations: DisableInstrumentations, all_instrumentations: dict
+    disable_instrumentations: DisableInstrumentations | None, all_instrumentations: dict
 ):
     if disable_instrumentations is None:
         for idx, (name, v) in enumerate(all_instrumentations.items()):

src/run_example.py

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@

 ENABLED_EXAMPLES = {
     "anthropic": False,
-    "azureopenai": True,
+    "azureopenai": False,
     "chroma": False,
     "cohere": False,
     "fastapi": False,

0 commit comments