Skip to content

Commit 91e7246

Browse files
Annhiluc authored and copybara-github committed
fix: Keep chunk content history when thought summaries are enabled in the stream response
PiperOrigin-RevId: 775383091
1 parent 15b2144 commit 91e7246

File tree

4 files changed

+164
-0
lines changed

4 files changed

+164
-0
lines changed

google/genai/_extra_utils.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525

2626
from . import _common
2727
from . import _mcp_utils
28+
from . import _transformers as t
2829
from . import errors
2930
from . import types
3031
from ._adapters import McpToGenAiToolAdapter
@@ -506,3 +507,15 @@ async def parse_config_for_mcp_sessions(
506507
parsed_config_copy.tools.append(tool)
507508

508509
return parsed_config_copy, mcp_to_genai_tool_adapters
510+
511+
512+
def append_chunk_contents(
    contents: Union[types.ContentListUnion, types.ContentListUnionDict],
    chunk: types.GenerateContentResponse,
) -> None:
  """Appends the first candidate's content of `chunk` to the contents list.

  Used by the streaming automatic-function-calling paths to keep chunk
  content in the request history (e.g. when thought summaries are enabled).

  Args:
    contents: The accumulated request contents; mutated in place when it is
      (or transforms to) a list.
    chunk: A streamed response chunk. Ignored when it is None or carries no
      candidates.
  """
  # Truthiness check (not just `is not None`): an empty candidates list
  # would make `chunk.candidates[0]` raise IndexError.
  if chunk is not None and chunk.candidates:
    chunk_content = chunk.candidates[0].content
    # NOTE(review): if t_contents returns a *new* list rather than the
    # caller's object, the append below mutates only the transformed copy —
    # confirm callers pass an already-transformed list when they rely on
    # this side effect.
    contents = t.t_contents(contents)  # type: ignore[assignment]
    if isinstance(contents, list) and chunk_content is not None:
      contents.append(chunk_content)  # type: ignore[arg-type]

google/genai/models.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6018,6 +6018,7 @@ def generate_content_stream(
60186018
# Yield chunks only if there's no function response parts.
60196019
for chunk in response:
60206020
if not function_map:
6021+
_extra_utils.append_chunk_contents(contents, chunk)
60216022
yield chunk
60226023
else:
60236024
if (
@@ -6030,6 +6031,7 @@ def generate_content_stream(
60306031
chunk, function_map
60316032
)
60326033
if not func_response_parts:
6034+
_extra_utils.append_chunk_contents(contents, chunk)
60336035
yield chunk
60346036

60356037
else:
@@ -6039,6 +6041,7 @@ def generate_content_stream(
60396041
chunk.automatic_function_calling_history = (
60406042
automatic_function_calling_history
60416043
)
6044+
_extra_utils.append_chunk_contents(contents, chunk)
60426045
yield chunk
60436046
if (
60446047
chunk is None
@@ -7547,6 +7550,7 @@ async def async_generator(model, contents, config): # type: ignore[no-untyped-d
75477550
# Yield chunks only if there's no function response parts.
75487551
async for chunk in response: # type: ignore[attr-defined]
75497552
if not function_map:
7553+
_extra_utils.append_chunk_contents(contents, chunk)
75507554
yield chunk
75517555
else:
75527556
if (
@@ -7561,6 +7565,7 @@ async def async_generator(model, contents, config): # type: ignore[no-untyped-d
75617565
)
75627566
)
75637567
if not func_response_parts:
7568+
_extra_utils.append_chunk_contents(contents, chunk)
75647569
yield chunk
75657570

75667571
else:
@@ -7571,6 +7576,7 @@ async def async_generator(model, contents, config): # type: ignore[no-untyped-d
75717576
chunk.automatic_function_calling_history = (
75727577
automatic_function_calling_history
75737578
)
7579+
_extra_utils.append_chunk_contents(contents, chunk)
75747580
yield chunk
75757581
if (
75767582
chunk is None

google/genai/tests/afc/test_generate_content_stream_afc.py

Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -326,6 +326,39 @@ def test_generate_content_stream_with_function_tools_used(
326326
) == TEST_AFC_HISTORY[i].model_dump(exclude_none=True)
327327

328328

329+
def test_generate_content_stream_with_thought_summaries(
    mock_generate_content_stream_with_afc,
    mock_get_function_response_parts,
):
  """Test when function tools are provided and thought summaries are enabled.

  Expected to answer weather based on function response.
  """
  models_instance = models.Models(api_client_=mock_api_client)
  config = types.GenerateContentConfig(
      tools=[get_current_weather],
      thinking_config=types.ThinkingConfig(include_thoughts=True),
  )
  stream = models_instance.generate_content_stream(
      model='test_model',
      contents='what is the weather in San Francisco?',
      config=config,
  )

  # Drain the stream; every yielded chunk should carry the expected text.
  chunk = None
  for chunk in stream:
    assert chunk.text == TEST_AFC_TEXT_PART.text

  # Two rounds of streaming: the initial request plus the follow-up request
  # carrying the function response.
  assert mock_generate_content_stream_with_afc.call_count == 2
  assert mock_get_function_response_parts.call_count == 2

  assert chunk is not None
  # Idiomatic enumerate instead of range(len(...)).
  for i, history_entry in enumerate(chunk.automatic_function_calling_history):
    assert history_entry.model_dump(
        exclude_none=True
    ) == TEST_AFC_HISTORY[i].model_dump(exclude_none=True)
360+
361+
329362
@pytest.mark.asyncio
330363
async def test_generate_content_stream_no_function_map_async(
331364
mock_generate_content_stream_no_afc,
@@ -460,3 +493,38 @@ async def test_generate_content_stream_with_function_async_function_used_async(
460493
assert chunk.automatic_function_calling_history[i].model_dump(
461494
exclude_none=True
462495
) == TEST_AFC_HISTORY[i].model_dump(exclude_none=True)
496+
497+
498+
@pytest.mark.asyncio
async def test_generate_content_stream_with_thought_summaries_async(
    mock_generate_content_stream_with_afc_async,
    mock_get_function_response_parts_async,
):
  """Test when function tools are provided and thought summaries are enabled.

  Expected to answer weather based on function response.
  """
  models_instance = models.AsyncModels(api_client_=mock_api_client)
  config = types.GenerateContentConfig(
      tools=[get_current_weather],
      thinking_config=types.ThinkingConfig(include_thoughts=True),
  )
  stream = await models_instance.generate_content_stream(
      model='test_model',
      contents='what is the weather in San Francisco?',
      config=config,
  )

  # Drain the stream; every yielded chunk should carry the expected text.
  chunk = None
  async for chunk in stream:
    assert chunk.text == TEST_AFC_TEXT_PART.text

  # Two rounds of streaming: the initial request plus the follow-up request
  # carrying the function response.
  assert mock_generate_content_stream_with_afc_async.call_count == 2

  assert mock_get_function_response_parts_async.call_count == 2

  assert chunk is not None
  # Idiomatic enumerate instead of range(len(...)).
  for i, history_entry in enumerate(chunk.automatic_function_calling_history):
    assert history_entry.model_dump(
        exclude_none=True
    ) == TEST_AFC_HISTORY[i].model_dump(exclude_none=True)
Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
1+
# Copyright 2025 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
#
15+
import pytest
16+
from ... import types
17+
from .. import pytest_helper
18+
19+
20+
def get_current_weather(location: str) -> str:
  """Return the current weather for a location.

  Args:
    location: The location of a city and state, e.g. "San Francisco, CA".
  """
  # Canned response used by the replay tests; the value never varies.
  weather = 'windy'
  return weather
27+
28+
29+
# Register the shared replay/table-test harness for this file; the tests
# below exercise models.generate_content via this setup.
pytestmark = pytest_helper.setup(
    file=__file__,
    globals_for_file=globals(),
    test_method='models.generate_content',
)
# Enable asyncio support for the async test in this module.
pytest_plugins = ('pytest_asyncio',)
35+
36+
37+
def test_generate_content_stream_with_function_and_thought_summaries(client):
  """Test when function tools are provided and thought summaries are enabled.

  Expected to answer weather based on function response.
  """
  # Build the stream with the config inlined; tool + thinking config match
  # the scenario under test.
  stream = client.models.generate_content_stream(
      model='gemini-2.5-flash',
      contents='what is the weather in San Francisco, CA?',
      config=types.GenerateContentConfig(
          tools=[get_current_weather],
          thinking_config=types.ThinkingConfig(include_thoughts=True),
      ),
  )

  chunk = None
  # Drain the stream; each yielded chunk must be a real response object.
  for chunk in stream:
    assert chunk is not None
55+
56+
57+
@pytest.mark.asyncio
async def test_generate_content_stream_with_function_and_thought_summaries_async(
    client,
):
  """Test when function tools are provided and thought summaries are enabled.

  Expected to answer weather based on function response.
  """
  # Build the stream with the config inlined; tool + thinking config match
  # the scenario under test.
  stream = await client.aio.models.generate_content_stream(
      model='gemini-2.5-flash',
      contents='what is the weather in San Francisco, CA?',
      config=types.GenerateContentConfig(
          tools=[get_current_weather],
          thinking_config=types.ThinkingConfig(include_thoughts=True),
      ),
  )

  chunk = None
  # Drain the stream; each yielded chunk must be a real response object.
  async for chunk in stream:
    assert chunk is not None

0 commit comments

Comments
 (0)