🐛 fix: Multiple deepseek-reasoner request errors (lobehub#5601)
* Update index.ts

* Update index.ts

* Update index.test.ts
sxjeru authored Feb 1, 2025
1 parent 4032658 commit 71cc32b
Showing 2 changed files with 163 additions and 3 deletions.
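
The underlying issue: the deepseek-reasoner endpoint rejects requests whose messages do not strictly alternate between user and assistant turns, or whose first non-system message comes from the assistant. This commit normalizes the message list before sending. As a reading aid, here is the added logic restated as a standalone, commented function — the Message type and interleaveMessages name are illustrative, not identifiers from the codebase:

// A minimal sketch of the normalization added in src/libs/agent-runtime/deepseek/index.ts.
// `Message` and `interleaveMessages` are illustrative names, not part of the codebase.
interface Message {
  content: string;
  role: 'system' | 'user' | 'assistant';
}

const interleaveMessages = (messages: Message[]): Message[] => {
  // Work on the non-system messages; a leading system message is re-attached below.
  const result = messages.filter((message) => message.role !== 'system');

  // The first non-system message must come from the user.
  if (result.length > 0 && result[0].role === 'assistant') {
    result.unshift({ content: '', role: 'user' });
  }

  // Wherever two adjacent messages share a role, splice in an empty turn of the
  // opposite role so the conversation strictly alternates.
  let lastRole = '';
  for (let i = 0; i < result.length; i++) {
    const message = result[i];
    if (message.role === lastRole) {
      const newRole = lastRole === 'assistant' ? 'user' : 'assistant';
      result.splice(i, 0, { content: '', role: newRole });
      i++; // step past the placeholder so it is not re-examined
    }
    lastRole = message.role;
  }

  // Keep a leading system message (e.g. a conversation summary) at the front.
  if (messages.length > 0 && messages[0].role === 'system') {
    result.unshift(messages[0]);
  }

  return result;
};

// interleaveMessages([{ content: 'Hi', role: 'assistant' }])
//   → [{ content: '', role: 'user' }, { content: 'Hi', role: 'assistant' }]

Splicing the placeholder in and stepping the index past it keeps the loop linear: each original message is examined once, and an inserted placeholder can never itself trigger another insertion.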
135 changes: 135 additions & 0 deletions src/libs/agent-runtime/deepseek/index.test.ts
@@ -4,12 +4,15 @@ import { Mock, afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
 
 import {
   ChatStreamCallbacks,
+  ChatStreamPayload,
+  LLMRoleType,
   LobeOpenAICompatibleRuntime,
   ModelProvider,
 } from '@/libs/agent-runtime';
 
 import * as debugStreamModule from '../utils/debugStream';
 import { LobeDeepSeekAI } from './index';
+import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
 
 const provider = ModelProvider.DeepSeek;
 const defaultBaseURL = 'https://api.deepseek.com/v1';
@@ -22,6 +25,17 @@ vi.spyOn(console, 'error').mockImplementation(() => {});
 
 let instance: LobeOpenAICompatibleRuntime;
 
+const createDeepSeekAIInstance = () => new LobeDeepSeekAI({ apiKey: 'test' });
+
+const mockSuccessfulChatCompletion = () => {
+  vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue({
+    id: 'cmpl-mock',
+    object: 'chat.completion',
+    created: Date.now(),
+    choices: [{ index: 0, message: { role: 'assistant', content: 'Mock response' }, finish_reason: 'stop' }],
+  } as any);
+};
+
 beforeEach(() => {
   instance = new LobeDeepSeekAI({ apiKey: 'test' });
 
@@ -251,5 +265,126 @@ describe('LobeDeepSeekAI', () => {
         process.env.DEBUG_DEEPSEEK_CHAT_COMPLETION = originalDebugValue;
       });
     });
+
+    describe('deepseek-reasoner', () => {
+      beforeEach(() => {
+        instance = createDeepSeekAIInstance();
+        mockSuccessfulChatCompletion();
+      });
+
+      it('should insert a user message if the first message is from assistant', async () => {
+        const payloadMessages = [{ content: 'Hello', role: 'assistant' as LLMRoleType }];
+        const expectedMessages = [
+          { content: '', role: 'user' },
+          ...payloadMessages,
+        ];
+
+        const payload: ChatStreamPayload = {
+          messages: payloadMessages,
+          model: 'deepseek-reasoner',
+          temperature: 0,
+        };
+
+        await instance.chat(payload);
+
+        expect(instance['client'].chat.completions.create).toHaveBeenCalled();
+        const actualArgs = (instance['client'].chat.completions.create as Mock).mock.calls[0];
+        const actualMessages = actualArgs[0].messages;
+        expect(actualMessages).toEqual(expectedMessages);
+      });
+
+      it('should insert a user message if the first message is from assistant (with system summary)', async () => {
+        const payloadMessages = [
+          { content: 'System summary', role: 'system' as LLMRoleType },
+          { content: 'Hello', role: 'assistant' as LLMRoleType },
+        ];
+        const expectedMessages = [
+          { content: 'System summary', role: 'system' },
+          { content: '', role: 'user' },
+          { content: 'Hello', role: 'assistant' },
+        ];
+
+        const payload: ChatStreamPayload = {
+          messages: payloadMessages,
+          model: 'deepseek-reasoner',
+          temperature: 0,
+        };
+
+        await instance.chat(payload);
+
+        expect(instance['client'].chat.completions.create).toHaveBeenCalled();
+        const actualArgs = (instance['client'].chat.completions.create as Mock).mock.calls[0];
+        const actualMessages = actualArgs[0].messages;
+        expect(actualMessages).toEqual(expectedMessages);
+      });
+
+      it('should insert alternating roles if messages do not alternate', async () => {
+        const payloadMessages = [
+          { content: 'user1', role: 'user' as LLMRoleType },
+          { content: 'user2', role: 'user' as LLMRoleType },
+          { content: 'assistant1', role: 'assistant' as LLMRoleType },
+          { content: 'assistant2', role: 'assistant' as LLMRoleType },
+        ];
+        const expectedMessages = [
+          { content: 'user1', role: 'user' },
+          { content: '', role: 'assistant' },
+          { content: 'user2', role: 'user' },
+          { content: 'assistant1', role: 'assistant' },
+          { content: '', role: 'user' },
+          { content: 'assistant2', role: 'assistant' },
+        ];
+
+        const payload: ChatStreamPayload = {
+          messages: payloadMessages,
+          model: 'deepseek-reasoner',
+          temperature: 0,
+        };
+
+        await instance.chat(payload);
+
+        expect(instance['client'].chat.completions.create).toHaveBeenCalled();
+        const actualArgs = (instance['client'].chat.completions.create as Mock).mock.calls[0];
+        const actualMessages = actualArgs[0].messages;
+        expect(actualMessages).toEqual(expectedMessages);
+      });
+
+      it('complex condition', async () => {
+        const payloadMessages = [
+          { content: 'system', role: 'system' as LLMRoleType },
+          { content: 'assistant', role: 'assistant' as LLMRoleType },
+          { content: 'user1', role: 'user' as LLMRoleType },
+          { content: 'user2', role: 'user' as LLMRoleType },
+          { content: 'user3', role: 'user' as LLMRoleType },
+          { content: 'assistant1', role: 'assistant' as LLMRoleType },
+          { content: 'assistant2', role: 'assistant' as LLMRoleType },
+        ];
+        const expectedMessages = [
+          { content: 'system', role: 'system' },
+          { content: '', role: 'user' },
+          { content: 'assistant', role: 'assistant' },
+          { content: 'user1', role: 'user' },
+          { content: '', role: 'assistant' },
+          { content: 'user2', role: 'user' },
+          { content: '', role: 'assistant' },
+          { content: 'user3', role: 'user' },
+          { content: 'assistant1', role: 'assistant' },
+          { content: '', role: 'user' },
+          { content: 'assistant2', role: 'assistant' },
+        ];
+
+        const payload: ChatStreamPayload = {
+          messages: payloadMessages,
+          model: 'deepseek-reasoner',
+          temperature: 0,
+        };
+
+        await instance.chat(payload);
+
+        expect(instance['client'].chat.completions.create).toHaveBeenCalled();
+        const actualArgs = (instance['client'].chat.completions.create as Mock).mock.calls[0];
+        const actualMessages = actualArgs[0].messages;
+        expect(actualMessages).toEqual(expectedMessages);
+      });
+    });
   });
 });
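
The new suite exercises every repair path but leaves the pass-through path implicit; a companion case (a sketch, not part of this commit) that would slot into the same describe block could assert that an already-alternating conversation is forwarded untouched:

// Hypothetical companion test; relies on the suite's beforeEach for setup.
it('should forward already-alternating messages unchanged', async () => {
  const payloadMessages = [
    { content: 'Hi', role: 'user' as LLMRoleType },
    { content: 'Hello', role: 'assistant' as LLMRoleType },
  ];

  await instance.chat({
    messages: payloadMessages,
    model: 'deepseek-reasoner',
    temperature: 0,
  });

  const actualArgs = (instance['client'].chat.completions.create as Mock).mock.calls[0];
  expect(actualArgs[0].messages).toEqual(payloadMessages);
});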
31 changes: 28 additions & 3 deletions src/libs/agent-runtime/deepseek/index.ts
@@ -12,24 +12,49 @@ export interface DeepSeekModelCard {
 export const LobeDeepSeekAI = LobeOpenAICompatibleFactory({
   baseURL: 'https://api.deepseek.com/v1',
   chatCompletion: {
-    handlePayload: ({ frequency_penalty, model, presence_penalty, temperature, top_p, ...payload }: ChatStreamPayload) =>
-      ({
+    handlePayload: ({ frequency_penalty, messages, model, presence_penalty, temperature, top_p, ...payload }: ChatStreamPayload) => {
+      // github.com/lobehub/lobe-chat/pull/5548
+      let filteredMessages = messages.filter(message => message.role !== 'system');
+
+      if (filteredMessages.length > 0 && filteredMessages[0].role === 'assistant') {
+        filteredMessages.unshift({ content: "", role: "user" });
+      }
+
+      let lastRole = '';
+      for (let i = 0; i < filteredMessages.length; i++) {
+        const message = filteredMessages[i];
+        if (message.role === lastRole) {
+          const newRole = lastRole === 'assistant' ? 'user' : 'assistant';
+          filteredMessages.splice(i, 0, { content: "", role: newRole });
+          i++;
+        }
+        lastRole = message.role;
+      }
+
+      if (messages.length > 0 && messages[0].role === 'system') {
+        filteredMessages.unshift(messages[0]);
+      }
+
+      return {
         ...payload,
         model,
         ...(model === 'deepseek-reasoner'
           ? {
               frequency_penalty: undefined,
+              messages: filteredMessages,
               presence_penalty: undefined,
               temperature: undefined,
               top_p: undefined,
             }
           : {
               frequency_penalty,
+              messages,
               presence_penalty,
               temperature,
               top_p,
             }),
-      }) as OpenAI.ChatCompletionCreateParamsStreaming,
+      } as OpenAI.ChatCompletionCreateParamsStreaming;
+    },
   },
   debug: {
     chatCompletion: () => process.env.DEBUG_DEEPSEEK_CHAT_COMPLETION === '1',
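Besides reordering the messages, the reasoner branch above forces the sampling parameters to undefined, since the endpoint rejects them. A rough before/after of a payload for deepseek-reasoner (shapes simplified, values hypothetical):

// Input to handlePayload:
const input = {
  messages: [{ content: 'Hello', role: 'assistant' }],
  model: 'deepseek-reasoner',
  temperature: 0,
  top_p: 0.9,
};

// Body handed to client.chat.completions.create: the message list is normalized
// and the sampling fields become undefined, so they vanish when the request
// body is JSON-serialized.
const output = {
  messages: [
    { content: '', role: 'user' },
    { content: 'Hello', role: 'assistant' },
  ],
  model: 'deepseek-reasoner',
  frequency_penalty: undefined,
  presence_penalty: undefined,
  temperature: undefined,
  top_p: undefined,
};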
