
Commit 073a92e

feat(chatcompletions): add handling of reasoning for third party providers
1 parent 0fe38c0 commit 073a92e

4 files changed: +69 −5 lines

packages/agents-core/src/types/protocol.ts

Lines changed: 15 additions & 0 deletions
@@ -65,6 +65,16 @@ export const InputText = SharedBase.extend({
 
 export type InputText = z.infer<typeof InputText>;
 
+export const ReasoningText = SharedBase.extend({
+  type: z.literal('reasoning_text'),
+  /**
+   * A text input for example a message from a user
+   */
+  text: z.string(),
+});
+
+export type ReasoningText = z.infer<typeof ReasoningText>;
+
 export const InputImage = SharedBase.extend({
   type: z.literal('input_image'),
 
@@ -452,6 +462,11 @@ export const ReasoningItem = SharedBase.extend({
    * The user facing representation of the reasoning. Additional information might be in the `providerData` field.
    */
  content: z.array(InputText),
+
+  /**
+   * The raw reasoning text from the model.
+   */
+  rawContent: z.array(ReasoningText).optional(),
 });
 
 export type ReasoningItem = z.infer<typeof ReasoningItem>;
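The new `ReasoningText` schema mirrors `InputText` but is tagged `reasoning_text`, and `ReasoningItem` gains an optional `rawContent` array for the unprocessed model reasoning. A minimal sketch of an item that satisfies the extended schema (assuming `ReasoningItem` is re-exported, as both Zod schema and type, via the `protocol` namespace of `@openai/agents-core`):

```ts
import { protocol } from '@openai/agents-core';

// Sketch only: `content` stays the user-facing representation,
// `rawContent` carries the provider's raw reasoning text.
const item: protocol.ReasoningItem = {
  type: 'reasoning',
  content: [],
  rawContent: [
    { type: 'reasoning_text', text: 'Compared both options before answering.' },
  ],
};

// Runtime validation via the Zod schema, assuming it is exported alongside the type.
protocol.ReasoningItem.parse(item);
```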

packages/agents-openai/src/openaiChatCompletionsConverter.ts

Lines changed: 5 additions & 4 deletions
@@ -182,10 +182,11 @@ export function itemsToMessages(
         });
       }
     } else if (item.type === 'reasoning') {
-      throw new UserError(
-        'Reasoning is not supported for chat completions. Got item: ' +
-          JSON.stringify(item),
-      );
+      const asst = ensureAssistantMessage();
+      // @ts-expect-error - reasoning is not supported in the official Chat Completion API spec
+      // this is handling third party providers that support reasoning
+      asst.reasoning = item.rawContent?.[0]?.text;
+      continue;
     } else if (item.type === 'hosted_tool_call') {
       if (item.name === 'file_search_call') {
         const asst = ensureAssistantMessage();
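With this change a reasoning item no longer aborts conversion; its raw text is attached to the current assistant message as a non-standard `reasoning` field that some Chat Completions-compatible providers accept. Roughly, the mapping looks like this (illustrative sketch only; the real conversion happens inside `itemsToMessages` via the internal `ensureAssistantMessage` helper):

```ts
// Illustrative only: a reasoning item as produced by the model layer...
const reasoningItem = {
  type: 'reasoning',
  content: [],
  rawContent: [{ type: 'reasoning_text', text: 'The user wants a summary, so...' }],
} as const;

// ...ends up folded into the assistant message sent back to the provider:
const assistantMessage = {
  role: 'assistant' as const,
  content: 'Here is the summary.',
  // Non-standard field: ignored by the official OpenAI API, read by some third-party providers.
  reasoning: reasoningItem.rawContent?.[0]?.text,
};

console.log(assistantMessage.reasoning);
```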

packages/agents-openai/src/openaiChatCompletionsModel.ts

Lines changed: 30 additions & 1 deletion
@@ -35,6 +35,23 @@ import { protocol } from '@openai/agents-core';
 
 export const FAKE_ID = 'FAKE_ID';
 
+// Some Chat Completions API compatible providers return a reasoning property on the message
+// If that's the case we handle them separately
+type OpenAIMessageWithReasoning =
+  OpenAI.Chat.Completions.ChatCompletionMessage & {
+    reasoning: string;
+  };
+
+function hasReasoningContent(
+  message: OpenAI.Chat.Completions.ChatCompletionMessage,
+): message is OpenAIMessageWithReasoning {
+  return (
+    'reasoning' in message &&
+    typeof message.reasoning === 'string' &&
+    message.reasoning !== ''
+  );
+}
+
 /**
  * A model that uses (or is compatible with) OpenAI's Chat Completions API.
  */
@@ -67,7 +84,19 @@ export class OpenAIChatCompletionsModel implements Model {
     const output: protocol.OutputModelItem[] = [];
     if (response.choices && response.choices[0]) {
       const message = response.choices[0].message;
-
+
+      if (hasReasoningContent(message)) {
+        output.push({
+          type: 'reasoning',
+          content: [],
+          rawContent: [
+            {
+              type: 'reasoning_text',
+              text: message.reasoning,
+            },
+          ],
+        });
+      }
       if (
         message.content !== undefined &&
         message.content !== null &&
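`hasReasoningContent` is a type guard: when a provider's message carries a non-empty string `reasoning` property, the message is narrowed to `OpenAIMessageWithReasoning` so the field can be read without casts. A self-contained sketch of the same pattern with simplified stand-in types (the real code narrows `OpenAI.Chat.Completions.ChatCompletionMessage`):

```ts
// Simplified stand-ins for the OpenAI SDK types used above.
type ChatMessage = { role: 'assistant'; content: string | null };
type MessageWithReasoning = ChatMessage & { reasoning: string };

function hasReasoningContent(message: ChatMessage): message is MessageWithReasoning {
  const candidate = message as ChatMessage & { reasoning?: unknown };
  return typeof candidate.reasoning === 'string' && candidate.reasoning !== '';
}

// Example payload as a reasoning-capable third-party provider might return it.
const message: ChatMessage = JSON.parse(
  '{"role":"assistant","content":"42","reasoning":"6 * 7 = 42"}',
);

if (hasReasoningContent(message)) {
  // `message.reasoning` is typed as string inside this branch.
  console.log(message.reasoning);
}
```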

packages/agents-openai/src/openaiChatCompletionsStreaming.ts

Lines changed: 19 additions & 0 deletions
@@ -9,6 +9,7 @@ type StreamingState = {
   text_content_index_and_output: [number, protocol.OutputText] | null;
   refusal_content_index_and_output: [number, protocol.Refusal] | null;
   function_calls: Record<number, protocol.FunctionCallItem>;
+  reasoning: string;
 };
 
 export async function* convertChatCompletionsStreamToResponses(
@@ -21,6 +22,7 @@
     text_content_index_and_output: null,
     refusal_content_index_and_output: null,
     function_calls: {},
+    reasoning: '',
   };
 
   for await (const chunk of stream) {
@@ -64,6 +66,14 @@
       state.text_content_index_and_output[1].text += delta.content;
     }
 
+    if (
+      'reasoning' in delta &&
+      delta.reasoning &&
+      typeof delta.reasoning === 'string'
+    ) {
+      state.reasoning += delta.reasoning;
+    }
+
     // Handle refusals
     if ('refusal' in delta && delta.refusal) {
       if (!state.refusal_content_index_and_output) {
@@ -98,6 +108,15 @@
 
   // Final output message
   const outputs: protocol.OutputModelItem[] = [];
+
+  if (state.reasoning) {
+    outputs.push({
+      type: 'reasoning',
+      content: [],
+      rawContent: [{ type: 'reasoning_text', text: state.reasoning }],
+    });
+  }
+
   if (
     state.text_content_index_and_output ||
     state.refusal_content_index_and_output