79 changes: 79 additions & 0 deletions src/api/providers/__tests__/openai-native-tools.spec.ts
@@ -296,4 +296,83 @@ describe("OpenAiNativeHandler MCP tool schema handling", () => {
 		expect(tool.parameters.properties.metadata.additionalProperties).toBe(false) // Nested object
 		expect(tool.parameters.properties.metadata.properties.labels.items.additionalProperties).toBe(false) // Array items
 	})
+
+	it("should handle missing call_id and name in tool_call_arguments.delta by using pending tool identity", async () => {
+		const handler = new OpenAiNativeHandler({
+			openAiNativeApiKey: "test-key",
+			apiModelId: "gpt-4o",
+		} as ApiHandlerOptions)
+
+		const mockClient = {
+			responses: {
+				create: vi.fn().mockImplementation(() => {
+					return {
+						[Symbol.asyncIterator]: async function* () {
+							// 1. Emit output_item.added with tool identity
+							yield {
+								type: "response.output_item.added",
+								item: {
+									type: "function_call",
+									call_id: "call_123",
+									name: "read_file",
+									arguments: "",
+								},
+							}
+
+							// 2. Emit tool_call_arguments.delta WITHOUT identity (just args)
+							yield {
+								type: "response.function_call_arguments.delta",
+								delta: '{"path":',
+							}
+
+							// 3. Emit another delta
+							yield {
+								type: "response.function_call_arguments.delta",
+								delta: '"/tmp/test.txt"}',
+							}
+
+							// 4. Emit output_item.done
+							yield {
+								type: "response.output_item.done",
+								item: {
+									type: "function_call",
+									call_id: "call_123",
+									name: "read_file",
+									arguments: '{"path":"/tmp/test.txt"}',
+								},
+							}
+						},
+					}
+				}),
+			},
+		}
+		;(handler as any).client = mockClient
+
+		const stream = handler.createMessage("system prompt", [], {
+			taskId: "test-task-id",
+		})
+
+		const chunks: any[] = []
+		for await (const chunk of stream) {
+			if (chunk.type === "tool_call_partial") {
+				chunks.push(chunk)
+			}
+		}
+
+		expect(chunks.length).toBe(2)
+		expect(chunks[0]).toEqual({
+			type: "tool_call_partial",
+			index: 0,
+			id: "call_123", // Should be filled from pendingToolCallId
+			name: "read_file", // Should be filled from pendingToolCallName
+			arguments: '{"path":',
+		})
+		expect(chunks[1]).toEqual({
+			type: "tool_call_partial",
+			index: 0,
+			id: "call_123",
+			name: "read_file",
+			arguments: '"/tmp/test.txt"}',
+		})
+	})
 })
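For orientation, the two tool_call_partial chunks asserted above are meant to be stitched back together downstream by concatenating arguments per call id. A minimal sketch of that accumulation, using a hypothetical accumulateToolCalls helper rather than the real NativeToolCallParser:

// Illustrative only: a hypothetical accumulator, not the real NativeToolCallParser.
type ToolCallPartial = { type: "tool_call_partial"; index: number; id: string; name: string; arguments: string }

function accumulateToolCalls(chunks: ToolCallPartial[]) {
	const buffers = new Map<string, { name: string; args: string }>()
	for (const chunk of chunks) {
		const entry = buffers.get(chunk.id) ?? { name: chunk.name, args: "" }
		entry.args += chunk.arguments
		buffers.set(chunk.id, entry)
	}
	// Parse once all argument deltas for a call have been concatenated.
	return [...buffers.entries()].map(([id, { name, args }]) => ({ id, name, args: JSON.parse(args) }))
}

// With the two chunks from the test above this yields:
// [{ id: "call_123", name: "read_file", args: { path: "/tmp/test.txt" } }]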
44 changes: 35 additions & 9 deletions src/api/providers/openai-native.ts
@@ -32,6 +32,13 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
 	protected options: ApiHandlerOptions
 	private client: OpenAI
 	private readonly providerName = "OpenAI Native"
+	/**
+	 * Some Responses streams emit tool-call argument deltas without stable call id/name.
+	 * Track the last observed tool identity from output_item events so we can still
+	 * emit `tool_call_partial` chunks (tool-call-only streams).
+	 */
+	private pendingToolCallId: string | undefined
+	private pendingToolCallName: string | undefined
 	// Resolved service tier from Responses API (actual tier used by OpenAI)
 	private lastServiceTier: ServiceTier | undefined
 	// Complete response output array (includes reasoning items with encrypted_content)
@@ -51,6 +58,7 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
 		"response.reasoning_summary_text.delta",
 		"response.refusal.delta",
 		"response.output_item.added",
+		"response.output_item.done",
 		"response.done",
 		"response.completed",
 		"response.tool_call_arguments.delta",
@@ -155,6 +163,9 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
 		this.lastResponseOutput = undefined
 		// Reset last response id for this request
 		this.lastResponseId = undefined
+		// Reset pending tool identity for this request
+		this.pendingToolCallId = undefined
+		this.pendingToolCallName = undefined
 
 		// Use Responses API for ALL models
 		const { verbosity, reasoning } = this.getModel()
@@ -1136,17 +1147,22 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
 			event?.type === "response.tool_call_arguments.delta" ||
 			event?.type === "response.function_call_arguments.delta"
 		) {
-			// Emit partial chunks directly - NativeToolCallParser handles state management
-			const callId = event.call_id || event.tool_call_id || event.id
-			const name = event.name || event.function_name
+			// Some streams omit stable identity on delta events; fall back to the
+			// most recently observed tool identity from output_item events.
+			const callId = event.call_id || event.tool_call_id || event.id || this.pendingToolCallId || undefined
+			const name = event.name || event.function_name || this.pendingToolCallName || undefined
 			const args = event.delta || event.arguments
 
-			yield {
-				type: "tool_call_partial",
-				index: event.index ?? 0,
-				id: callId,
-				name,
-				arguments: args,
+			// Avoid emitting incomplete tool_call_partial chunks; the downstream
+			// NativeToolCallParser needs a name to start a call.
+			if (typeof name === "string" && name.length > 0 && typeof callId === "string" && callId.length > 0) {
+				yield {
+					type: "tool_call_partial",
+					index: event.index ?? 0,
+					id: callId,
+					name,
+					arguments: args,
+				}
 			}
 			return
 		}
@@ -1164,6 +1180,16 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
 		if (event?.type === "response.output_item.added" || event?.type === "response.output_item.done") {
 			const item = event?.item
 			if (item) {
+				// Capture tool identity so subsequent argument deltas can be attributed.
+				if (item.type === "function_call" || item.type === "tool_call") {
+					const callId = item.call_id || item.tool_call_id || item.id
+					const name = item.name || item.function?.name || item.function_name
+					if (typeof callId === "string" && callId.length > 0) {
+						this.pendingToolCallId = callId
+						this.pendingToolCallName = typeof name === "string" ? name : undefined
+					}
+				}
+
 				if (item.type === "text" && item.text) {
 					yield { type: "text", text: item.text }
 				} else if (item.type === "reasoning" && item.text) {
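Taken together, the change is a small piece of stream state: remember the identity announced by output_item events, reuse it for identity-less argument deltas, and suppress partial chunks until an identity is known. A standalone sketch of that pattern, using a hypothetical attributeToolCallDeltas generator with event shapes reduced to the fields used here (not the provider's full event handling):

// Simplified sketch of the fallback behaviour; field names mirror the diff above.
type StreamEvent =
	| { type: "response.output_item.added" | "response.output_item.done"; item?: { type: string; call_id?: string; name?: string } }
	| { type: "response.function_call_arguments.delta"; call_id?: string; name?: string; delta?: string }

function* attributeToolCallDeltas(events: Iterable<StreamEvent>) {
	let pendingId: string | undefined
	let pendingName: string | undefined

	for (const event of events) {
		if (event.type === "response.output_item.added" || event.type === "response.output_item.done") {
			// Remember the most recently announced tool identity.
			if (event.item && event.item.type === "function_call" && event.item.call_id) {
				pendingId = event.item.call_id
				pendingName = event.item.name
			}
		} else if (event.type === "response.function_call_arguments.delta") {
			// Fall back to the remembered identity when the delta omits it.
			const id = event.call_id ?? pendingId
			const name = event.name ?? pendingName
			if (id && name) {
				yield { type: "tool_call_partial" as const, index: 0, id, name, arguments: event.delta ?? "" }
			}
		}
	}
}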