From d3fabdc8d80f625d748e68ddf554642385dab2d2 Mon Sep 17 00:00:00 2001 From: Zhijie He Date: Mon, 16 Sep 2024 23:43:25 +0800 Subject: [PATCH] =?UTF-8?q?=F0=9F=90=9B=20fix:=20fix=20a=20corner=20case?= =?UTF-8?q?=20of=20`tools=5Fcall`=20with=20empty=20object=20(#3955)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🐛 fix: fix got content in finish_reason * 🐛 fix: fix a corner case of `tools_call` with empty object --- .../utils/streams/openai.test.ts | 43 +++++++++++++++++++ .../agent-runtime/utils/streams/openai.ts | 2 +- 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/src/libs/agent-runtime/utils/streams/openai.test.ts b/src/libs/agent-runtime/utils/streams/openai.test.ts index 4c0a70c6015f..6af62efa9710 100644 --- a/src/libs/agent-runtime/utils/streams/openai.test.ts +++ b/src/libs/agent-runtime/utils/streams/openai.test.ts @@ -162,6 +162,49 @@ describe('OpenAIStream', () => { ); }); + it('should handle content with tool_calls but is an empty object', async () => { + // data: {"id":"chatcmpl-A7pokGUqSov0JuMkhiHhWU9GRtAgJ", "object":"chat.completion.chunk", "created":1726430846, "model":"gpt-4o-2024-05-13", "choices":[{"index":0, "delta":{"content":" today", "role":"", "tool_calls":[]}, "finish_reason":"", "logprobs":""}], "prompt_annotations":[{"prompt_index":0, "content_filter_results":null}]} + const mockOpenAIStream = new ReadableStream({ + start(controller) { + controller.enqueue({ + choices: [ + { + "index": 0, + "delta": { + "content": "Some contents", + "role": "", + "tool_calls": [] + }, + "finish_reason": "", + "logprobs": "" + } + ], + id: '456', + }); + + controller.close(); + }, + }); + + const onToolCallMock = vi.fn(); + + const protocolStream = OpenAIStream(mockOpenAIStream, { + onToolCall: onToolCallMock, + }); + + const decoder = new TextDecoder(); + const chunks = []; + + // @ts-ignore + for await (const chunk of protocolStream) { + chunks.push(decoder.decode(chunk, { stream: true })); + 
+ } + + expect(chunks).toEqual( + ['id: 456', 'event: text', `data: "Some contents"\n`].map((i) => `${i}\n`), + ); + }); + it('should handle other delta data', async () => { const mockOpenAIStream = new ReadableStream({ start(controller) { diff --git a/src/libs/agent-runtime/utils/streams/openai.ts b/src/libs/agent-runtime/utils/streams/openai.ts index 84b68f43e96f..14660c7ca332 100644 --- a/src/libs/agent-runtime/utils/streams/openai.ts +++ b/src/libs/agent-runtime/utils/streams/openai.ts @@ -27,7 +27,7 @@ export const transformOpenAIStream = ( return { data: chunk, id: chunk.id, type: 'data' }; } - if (item.delta?.tool_calls) { + if (typeof item.delta?.tool_calls === 'object' && item.delta.tool_calls?.length > 0) { return { data: item.delta.tool_calls.map((value, index): StreamToolCallChunkData => { if (stack && !stack.tool) {