@@ -157,7 +157,7 @@ async def test_async_pipeline_with_chat_prompt_builder_and_chat_generator_produc
     span = spans[1]
     assert span.status.is_ok
     assert not span.events
-    assert span.name == "OpenAIChatGenerator.run"
+    assert span.name == "OpenAIChatGenerator.run_async"
     attributes = dict(span.attributes or {})
     assert attributes.pop(OPENINFERENCE_SPAN_KIND) == LLM
     assert attributes.pop(INPUT_MIME_TYPE) == JSON
@@ -393,7 +393,7 @@ async def test_haystack_instrumentation_async_pipeline_filtering(
     spans = in_memory_span_exporter.get_finished_spans()
 
     assert [span.name for span in spans] == [
-        "InMemoryBM25Retriever.run",
+        "InMemoryBM25Retriever.run_async",
         "AsyncPipeline.run_async_generator",
         "AsyncPipeline.run_async",
     ]
@@ -539,13 +539,13 @@ def test_async_pipeline_tool_calling_llm_span_has_expected_attributes(
     spans = in_memory_span_exporter.get_finished_spans()
     assert len(spans) == 4
     assert [span.name for span in spans] == [
-        "OpenAIChatGenerator.run",
+        "OpenAIChatGenerator.run_async",
         "AsyncPipeline.run_async_generator",
         "AsyncPipeline.run_async",
         "AsyncPipeline.run",
     ]
     span = spans[0]
-    assert span.name == "OpenAIChatGenerator.run"
+    assert span.name == "OpenAIChatGenerator.run_async"
     assert span.status.is_ok
     assert not span.events
     attributes = dict(span.attributes or {})
@@ -701,13 +701,13 @@ async def test_async_pipeline_openai_chat_generator_llm_span_has_expected_attrib
     spans = in_memory_span_exporter.get_finished_spans()
     assert len(spans) == 2
     assert [span.name for span in spans] == [
-        "OpenAIChatGenerator.run",
+        "OpenAIChatGenerator.run_async",
         "AsyncPipeline.run_async_generator",
     ]
     span = spans[0]
     assert span.status.is_ok
     assert not span.events
-    assert span.name == "OpenAIChatGenerator.run"
+    assert span.name == "OpenAIChatGenerator.run_async"
     attributes = dict(span.attributes or {})
     assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
     assert (
@@ -1182,16 +1182,21 @@ def test_pipeline_and_component_spans_contain_context_attributes(
     assert attributes.get(LLM_PROMPT_TEMPLATE_VARIABLES, '{"var_name": "var-value"}')
 
 
+@pytest.mark.parametrize("use_async", [False, True])
 @pytest.mark.vcr(
     decode_compressed_response=True,
     before_record_request=remove_all_vcr_request_headers,
     before_record_response=remove_all_vcr_response_headers,
+    record_mode="once",
 )
-def test_agent_run_component_spans(
+async def test_agent_run_component_spans(
     openai_api_key: str,
     in_memory_span_exporter: InMemorySpanExporter,
     setup_haystack_instrumentation: Any,
+    use_async: bool,
 ) -> None:
+    run_method = "run_async" if use_async else "run"
+
     def search_documents(query: str, user_context: str) -> Dict[str, Any]:
         """Search documents using query and user context."""
         return {"results": [f"Found results for '{query}' (user: {user_context})"]}
@@ -1212,10 +1217,14 @@ def search_documents(query: str, user_context: str) -> Dict[str, Any]:
         tools=[search_tool],
         state_schema={"user_name": {"type": str}, "search_results": {"type": list}},
     )
-
-    result = agent.run(
-        messages=[ChatMessage.from_user("Search for Python tutorials")], user_name="Alice"
-    )
+    if use_async:
+        result = await agent.run_async(
+            messages=[ChatMessage.from_user("Search for Python tutorials")], user_name="Alice"
+        )
+    else:
+        result = agent.run(
+            messages=[ChatMessage.from_user("Search for Python tutorials")], user_name="Alice"
+        )
     last_message = result["last_message"]
     assert last_message.role.name == "ASSISTANT"
     assert last_message.text == (
@@ -1225,7 +1234,7 @@ def search_documents(query: str, user_context: str) -> Dict[str, Any]:
     spans = in_memory_span_exporter.get_finished_spans()
     assert len(spans) == 4
     openai_span = spans[0]
-    assert openai_span.name == "OpenAIChatGenerator.run"
+    assert openai_span.name == f"OpenAIChatGenerator.{run_method}"
     assert openai_span.status.is_ok
     attributes = dict(openai_span.attributes or {})
     assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
@@ -1253,7 +1262,7 @@ def search_documents(query: str, user_context: str) -> Dict[str, Any]:
     assert prompt_tokens + completion_tokens == total_tokens
     assert not attributes
     tool_invoker_span = spans[1]
-    assert tool_invoker_span.name == "ToolInvoker.run"
+    assert tool_invoker_span.name == f"ToolInvoker.{run_method}"
     assert tool_invoker_span.status.is_ok
     attributes = dict(tool_invoker_span.attributes or {})
     assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "CHAIN"
@@ -1263,7 +1272,7 @@ def search_documents(query: str, user_context: str) -> Dict[str, Any]:
     assert isinstance(attributes.pop(OUTPUT_VALUE), str)
     assert not attributes
     openai_span = spans[2]
-    assert openai_span.name == "OpenAIChatGenerator.run"
+    assert openai_span.name == f"OpenAIChatGenerator.{run_method}"
     assert openai_span.status.is_ok
     attributes = dict(openai_span.attributes or {})
     assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "LLM"
@@ -1287,7 +1296,7 @@ def search_documents(query: str, user_context: str) -> Dict[str, Any]:
     assert prompt_tokens + completion_tokens == total_tokens
     assert not attributes
     agent_run_span = spans[3]  # root span
-    assert agent_run_span.name == "Agent.run"
+    assert agent_run_span.name == f"Agent.{run_method}"
     assert agent_run_span.status.is_ok
     attributes = dict(agent_run_span.attributes or {})
     assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "CHAIN"
@@ -1355,6 +1364,65 @@ def test_individual_component_without_child_components(
     assert not attributes
 
 
+@pytest.mark.vcr(
+    decode_compressed_response=True,
+    before_record_request=remove_all_vcr_request_headers,
+    before_record_response=remove_all_vcr_response_headers,
+)
+async def test_individual_component_run_async_without_child_components(
+    in_memory_span_exporter: InMemorySpanExporter,
+    setup_haystack_instrumentation: Any,
+) -> None:
+    document_store = InMemoryDocumentStore()
+    documents = [
+        Document(content="There are over 7,000 languages spoken around the world today."),
+        Document(
+            content="Elephants have been observed to behave in a way that indicates "
+            "a high level of self-awareness, such as recognizing themselves "
+            "in mirrors."
+        ),
+        Document(
+            content="In certain parts of the world, like the Maldives, Puerto Rico, "
+            "and San Diego, you can witness the phenomenon of bioluminescent"
+            " waves."
+        ),
+    ]
+    document_store.write_documents(documents=documents)
+
+    retriever = InMemoryBM25Retriever(document_store=document_store)
+    results = await retriever.run_async(
+        query="How many languages are spoken around the world today?"
+    )
+    assert results.get("documents") is not None
+    assert len(results["documents"]) == 3
+    for document in results["documents"]:
+        assert isinstance(document, Document)
+        assert document.id is not None
+        assert document.content_type == "text"
+        assert isinstance(document.content, str)
+    spans = in_memory_span_exporter.get_finished_spans()
+    assert len(spans) == 1
+    retriever_span = spans[0]
+    assert retriever_span.name == "InMemoryBM25Retriever.run_async"
+    assert retriever_span.status.is_ok
+    attributes = dict(retriever_span.attributes or {})
+    assert attributes.pop(OPENINFERENCE_SPAN_KIND) == "RETRIEVER"
+    assert attributes.pop(INPUT_MIME_TYPE) == JSON
+    assert isinstance(attributes.pop(INPUT_VALUE), str)
+    assert attributes.pop(OUTPUT_MIME_TYPE) == JSON
+    assert isinstance(attributes.pop(OUTPUT_VALUE), str)
+    for i, document in enumerate(results["documents"]):
+        prefix = f"{RETRIEVAL_DOCUMENTS}.{i}"
+        assert isinstance(content := attributes.pop(f"{prefix}.{DOCUMENT_CONTENT}"), str)
+        assert content == document.content
+        assert isinstance(doc_id := attributes.pop(f"{prefix}.{DOCUMENT_ID}"), str)
+        assert doc_id == document.id
+        assert isinstance(score := attributes.pop(f"{prefix}.{DOCUMENT_SCORE}"), float)
+        assert score == document.score
+        assert isinstance(attributes.pop(f"{prefix}.{DOCUMENT_METADATA}"), str)
+    assert not attributes
+
+
 @pytest.fixture
 def openai_api_key(monkeypatch: pytest.MonkeyPatch) -> None:
     monkeypatch.setenv("OPENAI_API_KEY", "sk-")