Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion src/strands/models/gemini.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,6 +426,8 @@ async def stream(
yield self._format_chunk({"chunk_type": "content_start", "data_type": "text"})

tool_used = False
candidate = None
event = None
async for event in response:
candidates = event.candidates
candidate = candidates[0] if candidates else None
Expand Down Expand Up @@ -455,7 +457,8 @@ async def stream(
"data": "TOOL_USE" if tool_used else (candidate.finish_reason if candidate else "STOP"),
}
)
yield self._format_chunk({"chunk_type": "metadata", "data": event.usage_metadata})
if event:
yield self._format_chunk({"chunk_type": "metadata", "data": event.usage_metadata})

except genai.errors.ClientError as error:
if not error.message:
Expand Down
19 changes: 19 additions & 0 deletions tests/strands/models/test_gemini.py
Original file line number Diff line number Diff line change
Expand Up @@ -566,6 +566,25 @@ async def test_stream_response_none_candidates(gemini_client, model, messages, a
assert tru_chunks == exp_chunks


@pytest.mark.asyncio
async def test_stream_response_empty_stream(gemini_client, model, messages, agenerator, alist):
    """Verify that a stream yielding zero events produces a well-formed message.

    Regression test: when the provider stream is empty, the ``candidate`` and
    ``event`` locals in ``stream`` must be pre-initialized to ``None`` so the
    message_stop / metadata handling does not raise UnboundLocalError.
    """
    # Simulate a provider response that completes without emitting any events.
    gemini_client.aio.models.generate_content_stream.return_value = agenerator([])

    actual = await alist(model.stream(messages))

    expected = [
        {"messageStart": {"role": "assistant"}},
        {"contentBlockStart": {"start": {}}},
        {"contentBlockStop": {}},
        {"messageStop": {"stopReason": "end_turn"}},
    ]
    assert actual == expected


@pytest.mark.asyncio
async def test_stream_response_throttled_exception(gemini_client, model, messages):
gemini_client.aio.models.generate_content_stream.side_effect = genai.errors.ClientError(
Expand Down
Loading