🐛 fix: support to fetch model list on client (lobehub#2252)
cy948 authored Apr 28, 2024
1 parent 752e0a3 commit 76310a8
Showing 3 changed files with 22 additions and 7 deletions.
2 changes: 1 addition & 1 deletion src/app/settings/llm/Ollama/index.tsx
```diff
@@ -18,12 +18,12 @@ const OllamaProvider = memo(() => {
         label: t('llm.checker.title'),
         minWidth: undefined,
       }}
+      modelList={{ showModelFetcher: true }}
       provider={ModelProvider.Ollama}
       showApiKey={false}
       showBrowserRequest
       showEndpoint
       title={<Ollama.Combine size={24} />}
-      // modelList={{ showModelFetcher: true }}
     />
   );
 });
```
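This change swaps the commented-out `modelList` prop for a live one, so the Ollama settings panel shows the model-fetcher control again. Once visible, that control presumably goes through the models service — a minimal sketch, assuming `src/services/models.ts` exports a `modelsService` singleton (the singleton itself is not shown in this commit):

```ts
// Hedged sketch: how the fetcher UI could invoke the service once
// showModelFetcher is enabled. The `modelsService` singleton export is an
// assumption; only the ModelsService class appears in this diff.
import { modelsService } from '@/services/models';

export async function refreshOllamaModelList(): Promise<string[]> {
  // For Ollama this call now runs in the browser (see the models.ts change below).
  const models = await modelsService.getChatModels('ollama');
  return (models ?? []).map((model) => model.id);
}
```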
13 changes: 7 additions & 6 deletions src/libs/agent-runtime/ollama/index.ts
```diff
@@ -4,6 +4,7 @@ import { ClientOptions } from 'openai';
 
 import { OpenAIChatMessage } from '@/libs/agent-runtime';
 import { OllamaStream } from '@/libs/agent-runtime/ollama/stream';
+import { ChatModelCard } from '@/types/llm';
 
 import { LobeRuntimeAI } from '../BaseAI';
 import { AgentRuntimeErrorType } from '../error';
@@ -64,12 +65,12 @@ export class LobeOllamaAI implements LobeRuntimeAI {
     }
   }
 
-  // async models(): Promise<ChatModelCard[]> {
-  //   const list = await this.client.list();
-  //   return list.models.map((model) => ({
-  //     id: model.name,
-  //   }));
-  // }
+  async models(): Promise<ChatModelCard[]> {
+    const list = await this.client.list();
+    return list.models.map((model) => ({
+      id: model.name,
+    }));
+  }
 
   private buildOllamaMessages(messages: OpenAIChatMessage[]) {
     return messages.map((message) => this.convertContentToOllamaMessage(message));
```
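Uncommenting `models()` gives the Ollama runtime a working model-listing path: `this.client.list()` calls the Ollama client's list endpoint, and each returned model's `name` becomes the `id` of a `ChatModelCard`. A rough usage sketch, assuming the constructor accepts a `baseURL` option the way the `ClientOptions` import suggests (the real constructor signature is not part of this diff):

```ts
// Hedged sketch: exercising the newly enabled models() method against a
// local Ollama server. The constructor options are an assumption; adjust
// to the actual LobeOllamaAI signature.
import { LobeOllamaAI } from '@/libs/agent-runtime/ollama';

const ollama = new LobeOllamaAI({ baseURL: 'http://127.0.0.1:11434' });

// Top-level await in an ES module; each locally pulled model becomes a card.
const cards = await ollama.models();
// e.g. [{ id: 'llama3:latest' }, { id: 'mistral:7b' }]
```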
14 changes: 14 additions & 0 deletions src/services/models.ts
```diff
@@ -1,7 +1,10 @@
 import { createHeaderWithAuth } from '@/services/_auth';
+import { useGlobalStore } from '@/store/global';
+import { modelConfigSelectors } from '@/store/global/selectors';
 import { ChatModelCard } from '@/types/llm';
 
 import { API_ENDPOINTS } from './_url';
+import { initializeWithClientStore } from './chat';
 
 class ModelsService {
   getChatModels = async (provider: string): Promise<ChatModelCard[] | undefined> => {
@@ -10,6 +13,17 @@ class ModelsService {
       provider,
     });
     try {
+      /**
+       * Use browser agent runtime
+       */
+      const enableFetchOnClient = modelConfigSelectors.isProviderFetchOnClient(provider)(
+        useGlobalStore.getState(),
+      );
+      if (enableFetchOnClient) {
+        const agentRuntime = await initializeWithClientStore(provider, {});
+        return agentRuntime.models();
+      }
+
       const res = await fetch(API_ENDPOINTS.chatModels(provider), { headers });
       if (!res.ok) return;
```
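The service-level change is the heart of the fix: before hitting the server's model-list endpoint, `getChatModels` now consults `modelConfigSelectors.isProviderFetchOnClient`, and when the provider is marked fetch-on-client (typical for Ollama, whose server usually lives on the user's machine where the backend cannot reach it), it builds a browser-side agent runtime and lists models directly. A stripped-down sketch of that branch, with the store and runtime stubbed out so the control flow stands alone — the names here are illustrative, not the real lobe-chat API surface:

```ts
// Minimal sketch of the client/server branch added above, with dependencies
// injected so the flow is testable in isolation.
type ChatModelCard = { id: string };

interface ClientAgentRuntime {
  models(): Promise<ChatModelCard[]>;
}

async function getChatModels(
  provider: string,
  opts: {
    isFetchOnClient: (provider: string) => boolean;
    initClientRuntime: (provider: string) => Promise<ClientAgentRuntime>;
    serverEndpoint: (provider: string) => string; // stands in for API_ENDPOINTS.chatModels
  },
): Promise<ChatModelCard[] | undefined> {
  try {
    if (opts.isFetchOnClient(provider)) {
      // Browser path: talk to the provider (e.g. a localhost Ollama) directly.
      const runtime = await opts.initClientRuntime(provider);
      return await runtime.models();
    }
    // Server path: let the backend proxy the request.
    const res = await fetch(opts.serverEndpoint(provider));
    if (!res.ok) return;
    return await res.json();
  } catch {
    return;
  }
}
```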
