Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions ccproxy/llms/formatters/anthropic_to_openai/_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,33 @@ def build_openai_tool_call(
arguments=str(args_str),
),
)


def build_openai_tool_call_chunk(
    *,
    index: int,
    tool_id: str | None,
    tool_name: str | None,
    tool_input: Any,
    arguments: Any = None,
    fallback_index: int = 0,
) -> openai_models.ToolCallChunk:
    """Build a streaming tool-call delta (``ToolCallChunk``) with safe fallbacks.

    A non-empty ``str`` *arguments* value is used verbatim; anything else
    causes *tool_input* to be serialized instead.  A missing/blank id falls
    back to ``call_{fallback_index}`` and a missing/blank name falls back to
    ``"function"`` so the emitted chunk is always well-formed.
    """
    # Prefer the pre-serialized argument string when it is a non-empty str;
    # otherwise serialize the raw tool input ourselves.
    if isinstance(arguments, str) and arguments:
        payload = arguments
    else:
        payload = serialize_tool_arguments(tool_input)

    # Synthesize an id when the provider omitted one.
    resolved_id = (
        tool_id if isinstance(tool_id, str) and tool_id else f"call_{fallback_index}"
    )

    # Replace a missing/blank tool name with the generic "function" label.
    if isinstance(tool_name, str) and tool_name:
        resolved_name = tool_name
    else:
        resolved_name = "function"

    return openai_models.ToolCallChunk(
        index=index,
        id=str(resolved_id),
        type="function",
        function=openai_models.FunctionCall(
            name=str(resolved_name),
            arguments=str(payload),
        ),
    )
11 changes: 6 additions & 5 deletions ccproxy/llms/formatters/anthropic_to_openai/streams.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
from ccproxy.llms.models import openai as openai_models
from ccproxy.llms.streaming.accumulators import ClaudeAccumulator

from ._helpers import build_openai_tool_call
from ._helpers import build_openai_tool_call_chunk
from .requests import _build_responses_payload_from_anthropic_request
from .responses import convert__anthropic_usage_to_openai_responses__usage

Expand Down Expand Up @@ -88,10 +88,10 @@ def _anthropic_delta_to_text(
return None


def _build_openai_tool_call(
def _build_openai_tool_call_chunk(
accumulator: ClaudeAccumulator,
block_index: int,
) -> openai_models.ToolCall | None:
) -> openai_models.ToolCallChunk | None:
for tool_call in accumulator.get_complete_tool_calls():
if tool_call.get("index") != block_index:
continue
Expand All @@ -102,7 +102,8 @@ def _build_openai_tool_call(
tool_name = function_payload.get("name") or tool_call.get("name")
arguments = function_payload.get("arguments")

return build_openai_tool_call(
return build_openai_tool_call_chunk(
index=tool_call.get("index", block_index),
tool_id=tool_call.get("id"),
tool_name=tool_name,
tool_input=tool_call.get("input", {}),
Expand Down Expand Up @@ -1413,7 +1414,7 @@ async def generator() -> AsyncGenerator[
continue
if block_index in emitted_tool_indices:
continue
tool_call = _build_openai_tool_call(accumulator, block_index)
tool_call = _build_openai_tool_call_chunk(accumulator, block_index)
if tool_call is None:
continue
emitted_tool_indices.add(block_index)
Expand Down
15 changes: 10 additions & 5 deletions ccproxy/llms/formatters/openai_to_openai/streams.py
Original file line number Diff line number Diff line change
Expand Up @@ -389,7 +389,8 @@ def create_text_chunk(

# Emit initial tool call chunk to surface id/name information
if not state.initial_emitted:
tool_call = openai_models.ToolCall(
tool_call = openai_models.ToolCallChunk(
index=state.index,
id=state.id,
type="function",
function=openai_models.FunctionCall(
Expand Down Expand Up @@ -442,7 +443,8 @@ def create_text_chunk(
state.name = guessed

if state.initial_emitted:
tool_call = openai_models.ToolCall(
tool_call = openai_models.ToolCallChunk(
index=state.index,
id=state.id,
type="function",
function=openai_models.FunctionCall(
Expand Down Expand Up @@ -494,7 +496,8 @@ def create_text_chunk(
if guessed:
state.name = guessed

tool_call = openai_models.ToolCall(
tool_call = openai_models.ToolCallChunk(
index=state.index,
id=state.id,
type="function",
function=openai_models.FunctionCall(
Expand Down Expand Up @@ -586,7 +589,8 @@ def create_text_chunk(
if guessed:
state.name = guessed
if not state.arguments_emitted:
tool_call = openai_models.ToolCall(
tool_call = openai_models.ToolCallChunk(
index=state.index,
id=state.id,
type="function",
function=openai_models.FunctionCall(
Expand Down Expand Up @@ -616,7 +620,8 @@ def create_text_chunk(

# Emit a patch chunk if the name was never surfaced earlier
if state.name and not state.name_emitted:
tool_call = openai_models.ToolCall(
tool_call = openai_models.ToolCallChunk(
index=state.index,
id=state.id,
type="function",
function=openai_models.FunctionCall(
Expand Down
13 changes: 12 additions & 1 deletion ccproxy/llms/models/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,11 +185,22 @@ class FunctionCall(LlmBaseModel):


class ToolCall(LlmBaseModel):
    """Non-streaming tool call (ChatCompletionMessageToolCall).

    All fields are required (or defaulted) because a completed response
    always carries the full id/type/function triple, unlike the streaming
    delta variant where pieces may arrive across chunks.
    """

    id: str  # Provider-assigned identifier for this tool call.
    type: Literal["function"] = Field(default="function")  # Only function-type calls are modeled.
    function: FunctionCall  # Complete function name plus serialized arguments.


class ToolCallChunk(LlmBaseModel):
    """Streaming tool call delta (ChoiceDeltaToolCall).

    Unlike :class:`ToolCall`, every field except ``index`` is optional:
    streaming deltas may surface the id/name first and the arguments in
    later chunks, correlated by ``index``.
    """

    index: int  # Position of the tool call within the message; correlates partial chunks.
    id: str | None = None  # May be absent on continuation chunks.
    type: Literal["function"] | None = None  # Present only when the chunk introduces the call.
    function: FunctionCall | None = None  # Partial name/arguments payload for this delta.


class ChatMessage(LlmBaseModel):
"""
A message within a chat conversation.
Expand Down Expand Up @@ -309,7 +320,7 @@ class ChatCompletionResponse(LlmBaseModel):
class DeltaMessage(LlmBaseModel):
role: Literal["assistant"] | None = None
content: str | list[Any] | None = None
tool_calls: list[ToolCall] | None = None
tool_calls: list[ToolCallChunk] | None = None
audio: dict[str, Any] | None = None
reasoning: ResponseMessageReasoning | None = None

Expand Down
Loading