Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 11 additions & 7 deletions src/strands/experimental/bidi/models/gemini_live.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ def _resolve_client_config(self, config: dict[str, Any]) -> dict[str, Any]:

# Set default http_options if not provided
if "http_options" not in resolved:
resolved["http_options"] = {"api_version": "v1alpha"}
resolved["http_options"] = {"api_version": "v1beta"}

return resolved

Expand Down Expand Up @@ -429,12 +429,14 @@ async def _send_image_content(self, image_input: BidiImageInputEvent) -> None:
await self._live_session.send(input=msg)

async def _send_text_content(self, text: str) -> None:
"""Internal: Send text content using Gemini Live API."""
# Create content with text
content = genai_types.Content(role="user", parts=[genai_types.Part(text=text)])
"""Internal: Send text content using Gemini Live API.

# Send as client content
await self._live_session.send_client_content(turns=content)
Uses send_realtime_input for text delivery. Gemini 3.1+ models accept
send_client_content only for seeding the initial context history —
mid-session text must go through send_realtime_input. This path is also
compatible with Gemini 2.5 models.
"""
await self._live_session.send_realtime_input(text=text)

async def _send_tool_result(self, tool_result: ToolResult) -> None:
"""Internal: Send tool result using Gemini Live API."""
Expand Down Expand Up @@ -491,7 +493,9 @@ def _build_live_config(
"""
config_dict: dict[str, Any] = self.config["inference"].copy()

config_dict["session_resumption"] = {"handle": kwargs.get("live_session_handle")}
live_session_handle = kwargs.get("live_session_handle")
if live_session_handle is not None:
config_dict["session_resumption"] = {"handle": live_session_handle}

# Add system instruction if provided
if system_prompt:
Expand Down
11 changes: 5 additions & 6 deletions tests/strands/experimental/bidi/models/test_gemini_live.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,14 +198,13 @@ async def test_send_all_content_types(mock_genai_client, model):
_, mock_live_session, _ = mock_genai_client
await model.start()

# Test text input
# Test text input — routed through send_realtime_input (Gemini 3.1 compatible)
text_input = BidiTextInputEvent(text="Hello", role="user")
await model.send(text_input)
mock_live_session.send_client_content.assert_called_once()
call_args = mock_live_session.send_client_content.call_args
content = call_args.kwargs.get("turns")
assert content.role == "user"
assert content.parts[0].text == "Hello"
mock_live_session.send_realtime_input.assert_called_with(text="Hello")

# Reset for next assertion
mock_live_session.send_realtime_input.reset_mock()

# Test audio input (base64 encoded)
audio_b64 = base64.b64encode(b"audio_bytes").decode("utf-8")
Expand Down