Skip to content

Commit 982ff77

Browse files
authored
Merge pull request #49 from chaizhenhua/fix/streaming-tool-call-index
fix: add missing index field to streaming tool_call chunks
2 parents c3f4389 + 5e7a64e commit 982ff77

4 files changed

Lines changed: 58 additions & 11 deletions

File tree

ccproxy/llms/formatters/anthropic_to_openai/_helpers.py

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,3 +42,33 @@ def build_openai_tool_call(
4242
arguments=str(args_str),
4343
),
4444
)
45+
46+
47+
def build_openai_tool_call_chunk(
    *,
    index: int,
    tool_id: str | None,
    tool_name: str | None,
    tool_input: Any,
    arguments: Any = None,
    fallback_index: int = 0,
) -> openai_models.ToolCallChunk:
    """Build a streaming OpenAI tool-call chunk with safe fallbacks.

    A pre-serialized, non-empty ``arguments`` string wins; otherwise
    ``tool_input`` is serialized. A missing/empty ``tool_id`` falls back to
    ``call_{fallback_index}`` and a missing/empty ``tool_name`` to
    ``"function"``, so the emitted chunk is always well-formed.
    """
    # Prefer the caller-supplied argument string only when it is a real,
    # non-empty str; anything else is serialized from the raw tool input.
    if isinstance(arguments, str) and arguments:
        args_str = arguments
    else:
        args_str = serialize_tool_arguments(tool_input)

    # Synthesize a deterministic id when the upstream payload lacked one.
    if isinstance(tool_id, str) and tool_id:
        call_id = tool_id
    else:
        call_id = f"call_{fallback_index}"

    # Placeholder name keeps downstream consumers from seeing None.
    if isinstance(tool_name, str) and tool_name:
        name = tool_name
    else:
        name = "function"

    function_payload = openai_models.FunctionCall(
        name=str(name),
        arguments=str(args_str),
    )
    return openai_models.ToolCallChunk(
        index=index,
        id=str(call_id),
        type="function",
        function=function_payload,
    )

ccproxy/llms/formatters/anthropic_to_openai/streams.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727
from ccproxy.llms.models import openai as openai_models
2828
from ccproxy.llms.streaming.accumulators import ClaudeAccumulator
2929

30-
from ._helpers import build_openai_tool_call
30+
from ._helpers import build_openai_tool_call_chunk
3131
from .requests import _build_responses_payload_from_anthropic_request
3232
from .responses import convert__anthropic_usage_to_openai_responses__usage
3333

@@ -88,10 +88,10 @@ def _anthropic_delta_to_text(
8888
return None
8989

9090

91-
def _build_openai_tool_call(
91+
def _build_openai_tool_call_chunk(
9292
accumulator: ClaudeAccumulator,
9393
block_index: int,
94-
) -> openai_models.ToolCall | None:
94+
) -> openai_models.ToolCallChunk | None:
9595
for tool_call in accumulator.get_complete_tool_calls():
9696
if tool_call.get("index") != block_index:
9797
continue
@@ -102,7 +102,8 @@ def _build_openai_tool_call(
102102
tool_name = function_payload.get("name") or tool_call.get("name")
103103
arguments = function_payload.get("arguments")
104104

105-
return build_openai_tool_call(
105+
return build_openai_tool_call_chunk(
106+
index=tool_call.get("index", block_index),
106107
tool_id=tool_call.get("id"),
107108
tool_name=tool_name,
108109
tool_input=tool_call.get("input", {}),
@@ -1413,7 +1414,7 @@ async def generator() -> AsyncGenerator[
14131414
continue
14141415
if block_index in emitted_tool_indices:
14151416
continue
1416-
tool_call = _build_openai_tool_call(accumulator, block_index)
1417+
tool_call = _build_openai_tool_call_chunk(accumulator, block_index)
14171418
if tool_call is None:
14181419
continue
14191420
emitted_tool_indices.add(block_index)

ccproxy/llms/formatters/openai_to_openai/streams.py

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -389,7 +389,8 @@ def create_text_chunk(
389389

390390
# Emit initial tool call chunk to surface id/name information
391391
if not state.initial_emitted:
392-
tool_call = openai_models.ToolCall(
392+
tool_call = openai_models.ToolCallChunk(
393+
index=state.index,
393394
id=state.id,
394395
type="function",
395396
function=openai_models.FunctionCall(
@@ -442,7 +443,8 @@ def create_text_chunk(
442443
state.name = guessed
443444

444445
if state.initial_emitted:
445-
tool_call = openai_models.ToolCall(
446+
tool_call = openai_models.ToolCallChunk(
447+
index=state.index,
446448
id=state.id,
447449
type="function",
448450
function=openai_models.FunctionCall(
@@ -494,7 +496,8 @@ def create_text_chunk(
494496
if guessed:
495497
state.name = guessed
496498

497-
tool_call = openai_models.ToolCall(
499+
tool_call = openai_models.ToolCallChunk(
500+
index=state.index,
498501
id=state.id,
499502
type="function",
500503
function=openai_models.FunctionCall(
@@ -586,7 +589,8 @@ def create_text_chunk(
586589
if guessed:
587590
state.name = guessed
588591
if not state.arguments_emitted:
589-
tool_call = openai_models.ToolCall(
592+
tool_call = openai_models.ToolCallChunk(
593+
index=state.index,
590594
id=state.id,
591595
type="function",
592596
function=openai_models.FunctionCall(
@@ -616,7 +620,8 @@ def create_text_chunk(
616620

617621
# Emit a patch chunk if the name was never surfaced earlier
618622
if state.name and not state.name_emitted:
619-
tool_call = openai_models.ToolCall(
623+
tool_call = openai_models.ToolCallChunk(
624+
index=state.index,
620625
id=state.id,
621626
type="function",
622627
function=openai_models.FunctionCall(

ccproxy/llms/models/openai.py

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -185,11 +185,22 @@ class FunctionCall(LlmBaseModel):
185185

186186

187187
class ToolCall(LlmBaseModel):
    """Non-streaming tool call (ChatCompletionMessageToolCall)."""

    # Identifier of the tool call; required for non-streaming responses.
    id: str
    # Only "function"-type tool calls are modeled; defaults accordingly.
    type: Literal["function"] = Field(default="function")
    # The called function's name plus its JSON-encoded arguments.
    function: FunctionCall
191193

192194

195+
class ToolCallChunk(LlmBaseModel):
    """Streaming tool call delta (ChoiceDeltaToolCall).

    Unlike ``ToolCall``, every field except ``index`` is optional: a
    streamed delta may carry only a fragment of the call, and ``index``
    ties each fragment to its position in the message's tool_calls list.
    """

    # Position of this tool call within the choice's tool_calls array.
    index: int
    # May be absent on continuation chunks that only append arguments.
    id: str | None = None
    type: Literal["function"] | None = None
    function: FunctionCall | None = None
202+
203+
193204
class ChatMessage(LlmBaseModel):
194205
"""
195206
A message within a chat conversation.
@@ -309,7 +320,7 @@ class ChatCompletionResponse(LlmBaseModel):
309320
class DeltaMessage(LlmBaseModel):
310321
role: Literal["assistant"] | None = None
311322
content: str | list[Any] | None = None
312-
tool_calls: list[ToolCall] | None = None
323+
tool_calls: list[ToolCallChunk] | None = None
313324
audio: dict[str, Any] | None = None
314325
reasoning: ResponseMessageReasoning | None = None
315326

0 commit comments

Comments (0)