Skip to content

Commit 8079db7

Browse files
woofy0 and Duong. Sinclair authored
Fix TypeError: unhashable type: 'dict' in Ollama stream chat with tools (#19938)
* Fixed exception thrown when stream_chat is used with tools and tries to call set on a dict
* updated version
* linting

---------

Co-authored-by: Duong. Sinclair <[email protected]>
1 parent 98b6b7f commit 8079db7

File tree

3 files changed

+22
-2
lines changed

3 files changed

+22
-2
lines changed

llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -427,7 +427,7 @@ def gen() -> ChatResponseGen:
427427
content=response_txt,
428428
role=r["message"].get("role", MessageRole.ASSISTANT),
429429
additional_kwargs={
430-
"tool_calls": list(set(all_tool_calls)),
430+
"tool_calls": all_tool_calls,
431431
"thinking": thinking_txt,
432432
},
433433
),

llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ dev = [
2727

2828
[project]
2929
name = "llama-index-llms-ollama"
30-
version = "0.7.3"
30+
version = "0.7.4"
3131
description = "llama-index llms ollama integration"
3232
authors = [{name = "Your Name", email = "[email protected]"}]
3333
requires-python = ">=3.9,<4.0"

llama-index-integrations/llms/llama-index-llms-ollama/tests/test_llms_ollama.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -161,6 +161,26 @@ def test_chat_with_tools() -> None:
161161
assert isinstance(tool_result.raw_output, Song)
162162

163163

164+
@pytest.mark.skipif(
165+
client is None, reason="Ollama client is not available or test model is missing"
166+
)
167+
def test_stream_chat_with_tools() -> None:
168+
"""Makes sure that stream chat with tools returns tool call message without any errors"""
169+
llm = Ollama(model=test_model, context_window=8000)
170+
response = llm.stream_chat_with_tools(
171+
[tool], user_msg="Hello! Generate a random artist and song."
172+
)
173+
174+
for r in response:
175+
tool_calls = llm.get_tool_calls_from_response(r)
176+
assert len(tool_calls) == 1
177+
assert tool_calls[0].tool_name == tool.metadata.name
178+
179+
tool_result = tool(**tool_calls[0].tool_kwargs)
180+
assert tool_result.raw_output is not None
181+
assert isinstance(tool_result.raw_output, Song)
182+
183+
164184
@pytest.mark.skipif(
165185
client is None, reason="Ollama client is not available or test model is missing"
166186
)

0 commit comments

Comments (0)