Commit 1fd2fa0
πŸ› fix: fix cannot clone agent when imported from client (lobehub#3606)
* πŸ› fix: fix cannot clone agent when imported from client

* βœ… test: fix lint
arvinxx authored Aug 25, 2024
1 parent b9de2e3 commit 1fd2fa0
Showing 5 changed files with 34 additions and 27 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -171,7 +171,7 @@
"nuqs": "^1.17.8",
"officeparser": "^4.1.1",
"ollama": "^0.5.8",
"openai": "~4.54.0",
"openai": "^4.56.0",
"partial-json": "^0.1.7",
"pdf-parse": "^1.1.1",
"pdfjs-dist": "4.4.168",
3 changes: 2 additions & 1 deletion src/database/server/models/session.ts
@@ -170,7 +170,8 @@ export class SessionModel {

if (!result) return;

const { agent, ...session } = result;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { agent, clientId, ...session } = result;
const sessionId = this.genId();

// eslint-disable-next-line @typescript-eslint/no-unused-vars
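The functional change of the commit is the destructuring line above: when a session is duplicated, `clientId` is now dropped along with `agent` before the remaining fields are re-inserted. Sessions imported from the client carry a `clientId`, and copying it into the new row presumably collides with the original record, which is what made cloning fail. A minimal sketch of the pattern, using hypothetical `SessionRow` and `genId` stand-ins rather than the real Drizzle model types:

```ts
// Hypothetical stand-ins for illustration; not the actual SessionModel code.
interface SessionRow {
  id: string;
  clientId: string | null; // set when the session was imported from the client
  title: string;
  agent?: unknown;
}

const duplicateSession = (result: SessionRow, genId: () => string) => {
  // Drop `agent` (handled separately) and `clientId` (must stay unique to the
  // original row) before building the copy.
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const { agent, clientId, ...session } = result;

  return { ...session, id: genId() };
};
```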
2 changes: 1 addition & 1 deletion src/libs/agent-runtime/groq/index.test.ts
@@ -85,7 +85,7 @@ describe('LobeGroqAI', () => {
choices: [
{
index: 0,
message: { role: 'assistant', content: 'hello' },
message: { role: 'assistant', content: 'hello', refusal: null },
logprobs: null,
finish_reason: 'stop',
},
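The `refusal: null` addition goes hand in hand with the `openai` bump above: moving from `~4.54.0` (patch updates only) to `^4.56.0` (minor updates allowed) pulls in typings where the assistant message carries a nullable `refusal` field, so fully typed mocks have to provide it. A minimal sketch of the message shape under that assumption:

```ts
import type OpenAI from 'openai';

// Assistant message fixture matching the newer SDK typings; `refusal` is
// nullable but must be present, mirroring the change in the test above.
const assistantMessage: OpenAI.ChatCompletionMessage = {
  role: 'assistant',
  content: 'hello',
  refusal: null,
};
```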
4 changes: 2 additions & 2 deletions src/libs/agent-runtime/qwen/index.test.ts
@@ -106,7 +106,7 @@ describe('LobeQwenAI', () => {
});

it('should transform non-streaming response to stream correctly', async () => {
const mockResponse: OpenAI.ChatCompletion = {
const mockResponse = {
id: 'chatcmpl-fc539f49-51a8-94be-8061',
object: 'chat.completion',
created: 1719901794,
@@ -119,7 +119,7 @@
logprobs: null,
},
],
};
} as OpenAI.ChatCompletion;
vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(
mockResponse as any,
);
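The same SDK bump explains the switch from a `: OpenAI.ChatCompletion` annotation to a trailing `as OpenAI.ChatCompletion` assertion, here and in the factory test below: an annotation requires every newly required field (such as `message.refusal`) to be present, while an assertion accepts the existing partial fixture. A small illustration with a simplified fixture rather than the one in this test:

```ts
import type OpenAI from 'openai';

// Partial fixture written before `refusal` existed in the SDK typings; the
// trailing assertion accepts it, whereas `: OpenAI.ChatCompletion` would not.
const mockResponse = {
  id: 'chatcmpl-1',
  object: 'chat.completion',
  created: 1719901794,
  model: 'qwen-turbo',
  choices: [
    {
      index: 0,
      message: { role: 'assistant', content: 'hello' }, // no `refusal` field
      finish_reason: 'stop',
      logprobs: null,
    },
  ],
} as OpenAI.ChatCompletion;
```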
50 changes: 28 additions & 22 deletions src/libs/agent-runtime/utils/openaiCompatibleFactory/index.test.ts
@@ -342,7 +342,7 @@ describe('LobeOpenAICompatibleFactory', () => {
});

it('should transform non-streaming response to stream correctly', async () => {
const mockResponse: OpenAI.ChatCompletion = {
const mockResponse = {
id: 'a',
object: 'chat.completion',
created: 123,
@@ -360,7 +360,7 @@
completion_tokens: 5,
total_tokens: 10,
},
};
} as OpenAI.ChatCompletion;
vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(
mockResponse as any,
);
@@ -426,27 +426,29 @@ describe('LobeOpenAICompatibleFactory', () => {
},
provider: ModelProvider.Mistral,
});

const instance = new LobeMockProvider({ apiKey: 'test' });
const mockCreateMethod = vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(new ReadableStream() as any);

const mockCreateMethod = vi
.spyOn(instance['client'].chat.completions, 'create')
.mockResolvedValue(new ReadableStream() as any);

await instance.chat(
{
messages: [{ content: 'Hello', role: 'user' }],
model: 'open-mistral-7b',
temperature: 0,
},
{ user: 'testUser' }
{ user: 'testUser' },
);

expect(mockCreateMethod).toHaveBeenCalledWith(
expect.not.objectContaining({
user: 'testUser',
}),
expect.anything()
expect.anything(),
);
});

it('should add user to payload when noUserId is false', async () => {
const LobeMockProvider = LobeOpenAICompatibleFactory({
baseURL: 'https://api.mistral.ai/v1',
@@ -455,50 +457,54 @@
},
provider: ModelProvider.Mistral,
});

const instance = new LobeMockProvider({ apiKey: 'test' });
const mockCreateMethod = vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(new ReadableStream() as any);

const mockCreateMethod = vi
.spyOn(instance['client'].chat.completions, 'create')
.mockResolvedValue(new ReadableStream() as any);

await instance.chat(
{
messages: [{ content: 'Hello', role: 'user' }],
model: 'open-mistral-7b',
temperature: 0,
},
{ user: 'testUser' }
{ user: 'testUser' },
);

expect(mockCreateMethod).toHaveBeenCalledWith(
expect.objectContaining({
user: 'testUser',
}),
expect.anything()
expect.anything(),
);
});

it('should add user to payload when noUserId is not set in chatCompletion', async () => {
const LobeMockProvider = LobeOpenAICompatibleFactory({
baseURL: 'https://api.mistral.ai/v1',
provider: ModelProvider.Mistral,
});

const instance = new LobeMockProvider({ apiKey: 'test' });
const mockCreateMethod = vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(new ReadableStream() as any);

const mockCreateMethod = vi
.spyOn(instance['client'].chat.completions, 'create')
.mockResolvedValue(new ReadableStream() as any);

await instance.chat(
{
messages: [{ content: 'Hello', role: 'user' }],
model: 'open-mistral-7b',
temperature: 0,
},
{ user: 'testUser' }
{ user: 'testUser' },
);

expect(mockCreateMethod).toHaveBeenCalledWith(
expect.objectContaining({
user: 'testUser',
}),
expect.anything()
expect.anything(),
);
});
});
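The remaining edits in this file are lint-driven reflows (trailing commas, wrapped `vi.spyOn` chains), matching the "βœ… test: fix lint" part of the commit message. The three `noUserId` cases themselves pin down when the factory forwards `options.user` into the completion payload. A hedged sketch of that behaviour, using hypothetical names rather than the factory's real internals:

```ts
// Hypothetical simplification for illustration only; not the actual
// LobeOpenAICompatibleFactory implementation.
interface ChatCompletionConfig {
  noUserId?: boolean;
}

interface ChatMethodOptions {
  user?: string;
}

const withUser = (
  payload: Record<string, unknown>,
  options: ChatMethodOptions,
  chatCompletion: ChatCompletionConfig = {},
) => {
  // Attach `user` only when the provider has not opted out via `noUserId`.
  if (chatCompletion.noUserId || !options.user) return payload;

  return { ...payload, user: options.user };
};

withUser({ model: 'open-mistral-7b' }, { user: 'testUser' }, { noUserId: true }); // `user` omitted
withUser({ model: 'open-mistral-7b' }, { user: 'testUser' }, { noUserId: false }); // `user` forwarded
withUser({ model: 'open-mistral-7b' }, { user: 'testUser' }); // default: `user` forwarded
```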
