From f0c9532bdfd6530426c724040a0170deb3e6d537 Mon Sep 17 00:00:00 2001 From: Arvin Xu Date: Sun, 10 Sep 2023 22:18:43 +0800 Subject: [PATCH] =?UTF-8?q?=20=E2=9C=A8=20feat:=20support=20Azure=20OpenAI?= =?UTF-8?q?=20(#177)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 💄 style: 拆分独立的 LLM Tab * ✨ feat: 支持 Azure OpenAI 调用 * 🚨 ci: fix types * 🗃️ fix: 补充数据迁移逻辑 * 🚸 style: 优化对用户的表达感知 * 💄 style: fix layout * 🚨 ci: fix circular dependencies * ✅ test: fix test * 🎨 chore: clean storage --- next.config.mjs | 8 +- package.json | 13 +- src/config/server.ts | 2 + src/const/fetch.ts | 17 +- src/const/llm.ts | 17 ++ src/const/settings.ts | 24 +- src/locales/default/setting.ts | 72 ++++- .../{openai.ts => createChatCompletion.ts} | 43 +-- src/pages/api/openai.api.ts | 24 -- src/pages/api/openai/chat.api.ts | 43 +++ src/pages/api/openai/createAzureOpenai.ts | 26 ++ src/pages/api/openai/createOpenai.ts | 17 ++ src/pages/api/openai/models.api.ts | 17 ++ .../settings/features/Settings/Common.tsx | 28 +- .../features/Settings/LLM/Checker.tsx | 85 ++++++ .../features/Settings/LLM/ModelList.tsx | 51 ++++ .../features/Settings/LLM/getModelList.ts | 9 + .../settings/features/Settings/LLM/index.tsx | 152 ++++++++++ .../settings/features/Settings/index.tsx | 25 +- src/services/_header.ts | 33 +++ src/services/_url.ts | 17 ++ src/services/chatModel.ts | 14 +- src/services/modelList.ts | 15 + src/services/plugin.ts | 2 +- src/services/pluginMarket.ts | 2 +- src/services/url.ts | 8 - src/store/global/hooks/index.ts | 1 + .../global/hooks/useEffectAfterHydrated.ts | 22 ++ src/store/global/initialState.ts | 4 + src/store/global/selectors.test.ts | 264 ++++++++++++++++++ src/store/global/selectors.ts | 9 +- src/store/global/slices/settings.ts | 20 +- src/store/global/store.ts | 39 ++- src/types/settings.ts | 53 +++- src/utils/fetch.ts | 16 +- 35 files changed, 1006 insertions(+), 186 deletions(-) create mode 100644 src/const/llm.ts rename 
src/pages/api/{openai.ts => createChatCompletion.ts} (50%) delete mode 100644 src/pages/api/openai.api.ts create mode 100644 src/pages/api/openai/chat.api.ts create mode 100644 src/pages/api/openai/createAzureOpenai.ts create mode 100644 src/pages/api/openai/createOpenai.ts create mode 100644 src/pages/api/openai/models.api.ts create mode 100644 src/pages/settings/features/Settings/LLM/Checker.tsx create mode 100644 src/pages/settings/features/Settings/LLM/ModelList.tsx create mode 100644 src/pages/settings/features/Settings/LLM/getModelList.ts create mode 100644 src/pages/settings/features/Settings/LLM/index.tsx create mode 100644 src/services/_header.ts create mode 100644 src/services/_url.ts create mode 100644 src/services/modelList.ts delete mode 100644 src/services/url.ts create mode 100644 src/store/global/hooks/useEffectAfterHydrated.ts create mode 100644 src/store/global/selectors.test.ts diff --git a/next.config.mjs b/next.config.mjs index 6aece5e24ba7..7172072c291a 100644 --- a/next.config.mjs +++ b/next.config.mjs @@ -27,8 +27,12 @@ const nextConfig = { async rewrites() { return [ { - source: '/api/openai-dev', - destination: `${API_END_PORT_URL}/api/openai`, + source: '/api/openai/chat-dev', + destination: `${API_END_PORT_URL}/api/openai/chat`, + }, + { + source: '/api/openai/models-dev', + destination: `${API_END_PORT_URL}/api/openai/models`, }, { source: '/api/plugins-dev', diff --git a/package.json b/package.json index 0e9a57f4266e..9de0ebac2271 100644 --- a/package.json +++ b/package.json @@ -65,11 +65,12 @@ "dependencies": { "@ant-design/colors": "^7", "@ant-design/icons": "^5", + "@azure/openai": "latest", "@emoji-mart/data": "^1", "@emoji-mart/react": "^1", "@icons-pack/react-simple-icons": "^9", - "@lobehub/chat-plugin-sdk": "^1.17.0", - "@lobehub/chat-plugins-gateway": "^1.5.0", + "@lobehub/chat-plugin-sdk": "^1.17.7", + "@lobehub/chat-plugins-gateway": "^1.5.1", "@lobehub/ui": "latest", "@vercel/analytics": "^1", "ahooks": "^3", @@ -96,11 
+97,11 @@ "react-i18next": "^13", "react-intersection-observer": "^9", "react-layout-kit": "^1.7.1", - "serpapi": "^2", "swr": "^2", "systemjs": "^6.14.2", "ts-md5": "^1", "use-merge-value": "^1", + "utility-types": "^3", "uuid": "^9", "zustand": "^4.4", "zustand-utils": "^1" @@ -142,12 +143,6 @@ "typescript": "^5", "vitest": "latest" }, - "peerDependencies": { - "antd": ">=5", - "antd-style": ">=3", - "react": ">=18", - "react-dom": ">=18" - }, "publishConfig": { "access": "public", "registry": "https://registry.npmjs.org" diff --git a/src/config/server.ts b/src/config/server.ts index bc88c14d44bf..3007931591a4 100644 --- a/src/config/server.ts +++ b/src/config/server.ts @@ -3,6 +3,7 @@ declare global { namespace NodeJS { interface ProcessEnv { ACCESS_CODE?: string; + AZURE_API_KEY?: string; OPENAI_API_KEY?: string; OPENAI_PROXY_URL?: string; } @@ -16,6 +17,7 @@ export const getServerConfig = () => { return { ACCESS_CODE: process.env.ACCESS_CODE, + AZURE_API_KEY: process.env.AZURE_API_KEY, OPENAI_API_KEY: process.env.OPENAI_API_KEY, OPENAI_PROXY_URL: process.env.OPENAI_PROXY_URL, }; diff --git a/src/const/fetch.ts b/src/const/fetch.ts index 1155fad4dd42..334b5a3870fb 100644 --- a/src/const/fetch.ts +++ b/src/const/fetch.ts @@ -1,7 +1,20 @@ export const OPENAI_END_POINT = 'X-OPENAI-END_POINT'; - export const OPENAI_API_KEY_HEADER_KEY = 'X-OPENAI-API-KEY'; +export const USE_AZURE_OPENAI = 'X-USE_AZURE_OPENAI'; + +export const AZURE_OPENAI_API_VERSION = 'X-AZURE_OPENAI_API_VERSION'; + export const LOBE_CHAT_ACCESS_CODE = 'X-LOBE_CHAT_ACCESS_CODE'; -export const LOBE_PLUGIN_SETTINGS = 'X-LOBE_PLUGIN_SETTINGS'; +export const getOpenAIAuthFromRequest = (req: Request) => { + const apiKey = req.headers.get(OPENAI_API_KEY_HEADER_KEY); + const endpoint = req.headers.get(OPENAI_END_POINT); + const accessCode = req.headers.get(LOBE_CHAT_ACCESS_CODE); + const useAzureStr = req.headers.get(USE_AZURE_OPENAI); + const apiVersion = req.headers.get(AZURE_OPENAI_API_VERSION); + + 
const useAzure = !!useAzureStr; + + return { accessCode, apiKey, apiVersion, endpoint, useAzure }; +}; diff --git a/src/const/llm.ts b/src/const/llm.ts new file mode 100644 index 000000000000..da849775d93f --- /dev/null +++ b/src/const/llm.ts @@ -0,0 +1,17 @@ +/** + * A white list of language models that are allowed to display and be used in the app. + */ +export const LanguageModelWhiteList = [ + // OpenAI + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-16k', + 'gpt-4', + 'gpt-4-32k', +]; + +export const DEFAULT_OPENAI_MODEL_LIST = [ + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-16k', + 'gpt-4', + 'gpt-4-32k', +]; diff --git a/src/const/settings.ts b/src/const/settings.ts index f12d81481e07..32be379d93c7 100644 --- a/src/const/settings.ts +++ b/src/const/settings.ts @@ -1,18 +1,17 @@ +import { DEFAULT_OPENAI_MODEL_LIST } from '@/const/llm'; import { DEFAULT_AGENT_META } from '@/const/meta'; import { LanguageModel } from '@/types/llm'; import { LobeAgentConfig } from '@/types/session'; -import { GlobalBaseSettings, GlobalDefaultAgent, GlobalSettings } from '@/types/settings'; +import { + GlobalBaseSettings, + GlobalDefaultAgent, + GlobalLLMConfig, + GlobalSettings, +} from '@/types/settings'; export const DEFAULT_BASE_SETTINGS: GlobalBaseSettings = { - OPENAI_API_KEY: '', avatar: '', - compressThreshold: 24, - enableCompressThreshold: false, - enableHistoryCount: false, - enableMaxTokens: true, - endpoint: '', fontSize: 14, - historyCount: 24, language: 'zh-CN', neutralColor: '', password: '', @@ -34,6 +33,14 @@ export const DEFAULT_AGENT_CONFIG: LobeAgentConfig = { systemRole: '', }; +export const DEFAULT_LLM_CONFIG: GlobalLLMConfig = { + openAI: { + OPENAI_API_KEY: '', + azureApiVersion: '2023-08-01-preview', + models: DEFAULT_OPENAI_MODEL_LIST, + }, +}; + export const DEFAULT_AGENT: GlobalDefaultAgent = { config: DEFAULT_AGENT_CONFIG, meta: DEFAULT_AGENT_META, @@ -41,5 +48,6 @@ export const DEFAULT_AGENT: GlobalDefaultAgent = { export const DEFAULT_SETTINGS: GlobalSettings = { 
defaultAgent: DEFAULT_AGENT, + languageModel: DEFAULT_LLM_CONFIG, ...DEFAULT_BASE_SETTINGS, }; diff --git a/src/locales/default/setting.ts b/src/locales/default/setting.ts index feeb4db43858..1863cb3f7075 100644 --- a/src/locales/default/setting.ts +++ b/src/locales/default/setting.ts @@ -20,6 +20,64 @@ export default { session: '会话设置', sessionWithName: '会话设置 · {{name}}', }, + llm: { + AzureOpenAI: { + endpoint: { + desc: '从 Azure 门户检查资源时,可在“密钥和终结点”部分中找到此值', + placeholder: 'https://docs-test-001.openai.azure.com', + title: 'Azure API 地址', + }, + models: { + desc: '支持的模型', + title: '模型列表', + }, + title: 'Azure OpenAI 设置', + token: { + desc: '从 Azure 门户检查资源时,可在“密钥和终结点”部分中找到此值。 可以使用 KEY1 或 KEY2', + placeholder: 'Azure API Key', + title: 'API Key', + }, + }, + OpenAI: { + azureApiVersion: { + desc: 'Azure 的 API 版本,遵循 YYYY-MM-DD 格式,查阅[最新版本](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/reference#chat-completions)', + fetch: '获取列表', + title: 'Azure Api Version', + }, + check: { + button: '检查', + desc: '测试 Api Key 与代理地址是否正确填写', + pass: '检查通过', + title: '连通性检查', + }, + endpoint: { + desc: '除默认地址外,必须包含 http(s)://', + placeholder: 'https://api.openai.com/v1', + title: '接口代理地址', + }, + models: { + count: '共支持 {{count}} 个模型', + desc: '支持的模型', + fetch: '获取模型列表', + notSupport: 'Azure OpenAI 暂不支持查看模型列表', + notSupportTip: '你需要自行确保部署名称与模型名称一致', + refetch: '重新获取模型列表', + title: '模型列表', + }, + title: 'OpenAI 设置', + token: { + desc: '使用自己的 OpenAI Key', + placeholder: 'OpenAI API Key', + title: 'API Key', + }, + useAzure: { + desc: '使用 Azure 提供的 OpenAI 服务', + fetch: '获取列表', + title: 'Azure OpenAI', + }, + }, + waitingForMore: '更多模型正在 <1>计划接入 中,敬请期待 ✨', + }, settingAgent: { avatar: { title: '头像', @@ -114,19 +172,6 @@ export default { title: '核采样', }, }, - settingOpenAI: { - endpoint: { - desc: '除默认地址外,必须包含 http(s)://', - placeholder: 'https://api.openai.com/v1', - title: '接口代理地址', - }, - title: 'OpenAI 设置', - token: { - desc: '使用自己的 OpenAI Key', - placeholder: 'OpenAI API 
Key', - title: 'API Key', - }, - }, settingPlugin: { add: '添加', addTooltip: '添加自定义插件', @@ -173,5 +218,6 @@ export default { tab: { agent: '默认助手', common: '通用设置', + llm: '语言模型', }, }; diff --git a/src/pages/api/openai.ts b/src/pages/api/createChatCompletion.ts similarity index 50% rename from src/pages/api/openai.ts rename to src/pages/api/createChatCompletion.ts index c4121432d9e6..7b5732be5153 100644 --- a/src/pages/api/openai.ts +++ b/src/pages/api/createChatCompletion.ts @@ -1,46 +1,17 @@ import { OpenAIStream, StreamingTextResponse } from 'ai'; -import OpenAI, { ClientOptions } from 'openai'; +import OpenAI from 'openai'; -import { getServerConfig } from '@/config/server'; import { createErrorResponse } from '@/pages/api/error'; import { ChatErrorType } from '@/types/fetch'; import { OpenAIStreamPayload } from '@/types/openai'; -// 创建 OpenAI 实例 -export const createOpenAI = (userApiKey: string | null, endpoint?: string | null) => { - const { OPENAI_API_KEY, OPENAI_PROXY_URL } = getServerConfig(); - - const baseURL = endpoint ? endpoint : OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined; - - const config: ClientOptions = { - apiKey: !userApiKey ? OPENAI_API_KEY : userApiKey, - }; - - // a bug with openai: https://github.com/openai/openai-node/issues/283 - // TODO: should refactor when openai fix the bug - if (baseURL) { - config.baseURL = baseURL; - } - - return new OpenAI(config); -}; - interface CreateChatCompletionOptions { - OPENAI_API_KEY: string | null; - endpoint?: string | null; + openai: OpenAI; payload: OpenAIStreamPayload; } -export const createChatCompletion = async ({ - payload, - OPENAI_API_KEY, - endpoint, -}: CreateChatCompletionOptions) => { - // ============ 0.创建 OpenAI 实例 ============ // - - const openai = createOpenAI(OPENAI_API_KEY, endpoint); - - // ============ 1. 前置处理 messages ============ // +export const createChatCompletion = async ({ payload, openai }: CreateChatCompletionOptions) => { + // ============ 1. 
preprocess messages ============ // const { messages, ...params } = payload; const formatMessages = messages.map((m) => ({ @@ -49,7 +20,7 @@ export const createChatCompletion = async ({ role: m.role, })); - // ============ 2. 发送请求 ============ // + // ============ 2. send api ============ // try { const response = await openai.chat.completions.create({ @@ -63,7 +34,7 @@ export const createChatCompletion = async ({ // Check if the error is an OpenAI APIError if (error instanceof OpenAI.APIError) { return createErrorResponse(ChatErrorType.OpenAIBizError, { - endpoint: !!endpoint ? endpoint : undefined, + endpoint: openai.baseURL, error: error.error ?? error.cause, }); } @@ -73,7 +44,7 @@ export const createChatCompletion = async ({ // return as a GatewayTimeout error return createErrorResponse(ChatErrorType.InternalServerError, { - endpoint, + endpoint: openai.baseURL, error: JSON.stringify(error), }); } diff --git a/src/pages/api/openai.api.ts b/src/pages/api/openai.api.ts deleted file mode 100644 index 2390cd9e0cab..000000000000 --- a/src/pages/api/openai.api.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY, OPENAI_END_POINT } from '@/const/fetch'; -import { ErrorType } from '@/types/fetch'; -import { OpenAIStreamPayload } from '@/types/openai'; - -import { checkAuth } from './auth'; -import { createErrorResponse } from './error'; -import { createChatCompletion } from './openai'; - -export const runtime = 'edge'; - -export default async function handler(req: Request) { - const payload = (await req.json()) as OpenAIStreamPayload; - const apiKey = req.headers.get(OPENAI_API_KEY_HEADER_KEY); - const accessCode = req.headers.get(LOBE_CHAT_ACCESS_CODE); - const endpoint = req.headers.get(OPENAI_END_POINT); - - const result = checkAuth({ accessCode, apiKey }); - - if (!result.auth) { - return createErrorResponse(result.error as ErrorType); - } - - return createChatCompletion({ OPENAI_API_KEY: apiKey, endpoint, payload }); -} 
diff --git a/src/pages/api/openai/chat.api.ts b/src/pages/api/openai/chat.api.ts new file mode 100644 index 000000000000..d64ae7bb8e4e --- /dev/null +++ b/src/pages/api/openai/chat.api.ts @@ -0,0 +1,43 @@ +import OpenAI from 'openai'; + +import { getOpenAIAuthFromRequest } from '@/const/fetch'; +import { ChatErrorType, ErrorType } from '@/types/fetch'; +import { OpenAIStreamPayload } from '@/types/openai'; + +import { checkAuth } from '../auth'; +import { createChatCompletion } from '../createChatCompletion'; +import { createErrorResponse } from '../error'; +import { createAzureOpenai } from './createAzureOpenai'; +import { createOpenai } from './createOpenai'; + +export const runtime = 'edge'; + +export default async function handler(req: Request) { + const payload = (await req.json()) as OpenAIStreamPayload; + + const { apiKey, accessCode, endpoint, useAzure, apiVersion } = getOpenAIAuthFromRequest(req); + + const result = checkAuth({ accessCode, apiKey }); + + if (!result.auth) { + return createErrorResponse(result.error as ErrorType); + } + + let openai: OpenAI; + if (useAzure) { + if (!apiVersion) return createErrorResponse(ChatErrorType.BadRequest); + + // `https://test-001.openai.azure.com/openai/deployments/gpt-35-turbo`, + const url = `${endpoint}/openai/deployments/${payload.model.replace('.', '')}`; + + openai = createAzureOpenai({ + apiVersion, + endpoint: url, + userApiKey: apiKey, + }); + } else { + openai = createOpenai(apiKey, endpoint); + } + + return createChatCompletion({ openai, payload }); +} diff --git a/src/pages/api/openai/createAzureOpenai.ts b/src/pages/api/openai/createAzureOpenai.ts new file mode 100644 index 000000000000..1108a33ba628 --- /dev/null +++ b/src/pages/api/openai/createAzureOpenai.ts @@ -0,0 +1,26 @@ +import OpenAI, { ClientOptions } from 'openai'; + +import { getServerConfig } from '@/config/server'; + +// 创建 Azure OpenAI 实例 +export const createAzureOpenai = (params: { + apiVersion: string; + endpoint: string; + 
userApiKey?: string | null; +}) => { + const { AZURE_API_KEY } = getServerConfig(); + + const baseURL = params.endpoint; + const apiKey = !params.userApiKey ? AZURE_API_KEY : params.userApiKey; + + const config: ClientOptions = { + apiKey: apiKey, + baseURL, + defaultHeaders: { 'api-key': apiKey }, + defaultQuery: { + 'api-version': params.apiVersion, + }, + }; + + return new OpenAI(config); +}; diff --git a/src/pages/api/openai/createOpenai.ts b/src/pages/api/openai/createOpenai.ts new file mode 100644 index 000000000000..b3ad8a53c68f --- /dev/null +++ b/src/pages/api/openai/createOpenai.ts @@ -0,0 +1,17 @@ +import OpenAI, { ClientOptions } from 'openai'; + +import { getServerConfig } from '@/config/server'; + +// 创建 OpenAI 实例 +export const createOpenai = (userApiKey: string | null, endpoint?: string | null) => { + const { OPENAI_API_KEY, OPENAI_PROXY_URL } = getServerConfig(); + + const baseURL = endpoint ? endpoint : OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined; + + const config: ClientOptions = { + apiKey: !userApiKey ? 
OPENAI_API_KEY : userApiKey, + baseURL, + }; + + return new OpenAI(config); +}; diff --git a/src/pages/api/openai/models.api.ts b/src/pages/api/openai/models.api.ts new file mode 100644 index 000000000000..750f13591872 --- /dev/null +++ b/src/pages/api/openai/models.api.ts @@ -0,0 +1,17 @@ +import { getOpenAIAuthFromRequest } from '@/const/fetch'; + +import { createOpenai } from './createOpenai'; + +export const runtime = 'edge'; + +export default async function handler(req: Request) { + const { apiKey, endpoint } = getOpenAIAuthFromRequest(req); + + const openAI = createOpenai(apiKey, endpoint); + + const res = await openAI.models.list(); + + const modelList = res.data.map((i) => i.id); + + return new Response(JSON.stringify(modelList)); +} diff --git a/src/pages/settings/features/Settings/Common.tsx b/src/pages/settings/features/Settings/Common.tsx index 3c26412f0d24..0713ce5f0fa2 100644 --- a/src/pages/settings/features/Settings/Common.tsx +++ b/src/pages/settings/features/Settings/Common.tsx @@ -3,7 +3,7 @@ import { Form as AntForm, App, Button, Input, Select } from 'antd'; import isEqual from 'fast-deep-equal'; import { changeLanguage } from 'i18next'; import { debounce } from 'lodash-es'; -import { AppWindow, Monitor, Moon, Palette, Sun, Webhook } from 'lucide-react'; +import { AppWindow, Monitor, Moon, Palette, Sun } from 'lucide-react'; import { memo, useCallback, useMemo } from 'react'; import { useTranslation } from 'react-i18next'; @@ -144,30 +144,6 @@ const Common = memo(() => { [settings], ); - const openAI: SettingItemGroup = useMemo( - () => ({ - children: [ - { - children: , - desc: t('settingOpenAI.token.desc'), - label: t('settingOpenAI.token.title'), - name: 'OPENAI_API_KEY', - }, - { - children: , - desc: t('settingOpenAI.endpoint.desc'), - - label: t('settingOpenAI.endpoint.title'), - - name: 'endpoint', - }, - ], - icon: Webhook, - title: t('settingOpenAI.title'), - }), - [settings], - ); - const system: SettingItemGroup = useMemo( () => ({ 
children: [ @@ -204,7 +180,7 @@ const Common = memo(() => { [settings], ); - const items = useMemo(() => [theme, openAI, system], [settings]); + const items = useMemo(() => [theme, system], [settings]); return (
(({ checkModel }) => { + const { t } = useTranslation('setting'); + + const [loading, setLoading] = useState(false); + const [pass, setPass] = useState(false); + + const theme = useTheme(); + const [error, setError] = useState(); + + const checkConnection = async () => { + if (checkModel) { + getModelList(); + } + + const data = await fetchPresetTaskResult({ + onError: (_, rawError) => { + setError(rawError); + }, + onLoadingChange: (loading) => { + setLoading(loading); + }, + params: { + messages: [ + { + content: '你好', + role: 'user', + }, + ], + model: 'gpt-3.5-turbo', + }, + }); + + if (data) { + setError(undefined); + setPass(true); + } + }; + return ( + + + + + {pass && ( + + + {t('llm.OpenAI.check.pass')} + + )} + + + {error && ( + + + + {JSON.stringify(error.body, null, 2)} + + + )} + + ); +}); + +export default Checker; diff --git a/src/pages/settings/features/Settings/LLM/ModelList.tsx b/src/pages/settings/features/Settings/LLM/ModelList.tsx new file mode 100644 index 000000000000..6dec8382b316 --- /dev/null +++ b/src/pages/settings/features/Settings/LLM/ModelList.tsx @@ -0,0 +1,51 @@ +import { ActionIcon } from '@lobehub/ui'; +import { Button, Dropdown, Skeleton } from 'antd'; +import { MenuItemType } from 'antd/lib/menu/hooks/useItems'; +import { RotateCwIcon } from 'lucide-react'; +import { memo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { Flexbox } from 'react-layout-kit'; + +import { getModelList } from './getModelList'; + +interface ModelListProps { + value?: string[]; +} +const ModelList = memo(({ value }) => { + const { t } = useTranslation('setting'); + + const isLoading = !value; + + if (isLoading) + return ( + + ); + + const isEmpty = value?.length === 0; + + return isEmpty ? 
( + + ) : ( + ((v) => ({ + key: v, + label: v, + })), + }} + > + + {t('llm.OpenAI.models.count', { count: value.length })} + + + + ); +}); + +export default ModelList; diff --git a/src/pages/settings/features/Settings/LLM/getModelList.ts b/src/pages/settings/features/Settings/LLM/getModelList.ts new file mode 100644 index 000000000000..e0319b2b22b6 --- /dev/null +++ b/src/pages/settings/features/Settings/LLM/getModelList.ts @@ -0,0 +1,9 @@ +import { fetchModelList } from '@/services/modelList'; +import { useGlobalStore } from '@/store/global'; + +export const getModelList = async (brand = 'openAI') => { + const setSettings = useGlobalStore.getState().setSettings; + const models = await fetchModelList(); + + setSettings({ languageModel: { [brand]: { models } } }); +}; diff --git a/src/pages/settings/features/Settings/LLM/index.tsx b/src/pages/settings/features/Settings/LLM/index.tsx new file mode 100644 index 000000000000..1c61bc3844af --- /dev/null +++ b/src/pages/settings/features/Settings/LLM/index.tsx @@ -0,0 +1,152 @@ +import { Form, Markdown } from '@lobehub/ui'; +import { Form as AntForm, AutoComplete, Input, Switch } from 'antd'; +import { createStyles } from 'antd-style'; +import { debounce } from 'lodash-es'; +import { Webhook } from 'lucide-react'; +import Link from 'next/link'; +import { memo } from 'react'; +import { Trans, useTranslation } from 'react-i18next'; +import { Flexbox } from 'react-layout-kit'; + +import { FORM_STYLE } from '@/const/layoutTokens'; +import { globalSelectors, useEffectAfterGlobalHydrated, useGlobalStore } from '@/store/global'; + +import Checker from './Checker'; + +const useStyles = createStyles(({ css, token }) => ({ + markdown: css` + a { + font-size: 12px !important; + } + + p { + font-size: 12px !important; + color: ${token.colorTextDescription} !important; + } + `, + plan: css` + color: ${token.colorTextDescription}; + `, + tip: css` + font-size: 12px; + color: ${token.colorTextDescription}; + `, +})); + +const configKey 
= 'languageModel'; + +const LLM = memo(() => { + const { t } = useTranslation('setting'); + const [form] = AntForm.useForm(); + const { styles } = useStyles(); + const [setSettings] = useGlobalStore((s) => [s.setSettings]); + + useEffectAfterGlobalHydrated((store) => { + const settings = globalSelectors.currentSettings(store.getState()); + + form.setFieldsValue(settings); + }, []); + + const useAzure = useGlobalStore((s) => s.settings.languageModel.openAI.useAzure); + + const openAI = { + children: [ + { + children: ( + + ), + desc: useAzure ? t('llm.AzureOpenAI.token.desc') : t('llm.OpenAI.token.desc'), + label: useAzure ? t('llm.AzureOpenAI.token.title') : t('llm.OpenAI.token.title'), + name: [configKey, 'openAI', 'OPENAI_API_KEY'], + }, + { + children: ( + + ), + desc: useAzure ? t('llm.AzureOpenAI.endpoint.desc') : t('llm.OpenAI.endpoint.desc'), + + label: useAzure ? t('llm.AzureOpenAI.endpoint.title') : t('llm.OpenAI.endpoint.title'), + + name: [configKey, 'openAI', 'endpoint'], + }, + { + children: , + desc: t('llm.OpenAI.useAzure.desc'), + label: t('llm.OpenAI.useAzure.title'), + name: [configKey, 'openAI', 'useAzure'], + valuePropName: 'checked', + }, + { + children: ( + ({ + label: i, + value: i, + }))} + placeholder={'20XX-XX-XX'} + /> + ), + desc: ( + {t('llm.OpenAI.azureApiVersion.desc')} + ), + hidden: !useAzure, + label: t('llm.OpenAI.azureApiVersion.title'), + name: [configKey, 'openAI', 'azureApiVersion'], + }, + { + children: , + desc: t('llm.OpenAI.check.desc'), + label: t('llm.OpenAI.check.title'), + }, + // { + // children: useAzure ? {t('llm.OpenAI.models.notSupport')} : , + // desc: useAzure ? t('llm.OpenAI.models.notSupportTip') : t('llm.OpenAI.models.desc'), + // label: t('llm.OpenAI.models.title'), + // name: [configKey, 'openAI', 'models'], + // }, + ], + icon: Webhook, + title: t('llm.OpenAI.title'), + }; + + return ( + + + +
+ + 更多模型正在 + + 计划接入 + + 中 ,敬请期待 ✨ + +
+
+
+ ); +}); + +export default LLM; diff --git a/src/pages/settings/features/Settings/index.tsx b/src/pages/settings/features/Settings/index.tsx index 21550e34c420..f733fadfd8ff 100644 --- a/src/pages/settings/features/Settings/index.tsx +++ b/src/pages/settings/features/Settings/index.tsx @@ -2,13 +2,16 @@ import { GridBackground, Icon, Logo, TabsNav } from '@lobehub/ui'; import { createStyles } from 'antd-style'; import { PackageCheck } from 'lucide-react'; import { rgba } from 'polished'; -import { memo, useState } from 'react'; +import { memo } from 'react'; import { useTranslation } from 'react-i18next'; import { Center, Flexbox } from 'react-layout-kit'; +import { useGlobalStore } from '@/store/global'; + import pkg from '../../../../../package.json'; import Agent from './Agent'; import Common from './Common'; +import LLM from './LLM'; const useStyles = createStyles(({ css, token, isDarkMode }) => ({ background: css` @@ -55,16 +58,12 @@ const useStyles = createStyles(({ css, token, isDarkMode }) => ({ `, })); -enum Tabs { - agent = 'agent', - common = 'common', -} - const Settings = memo(() => { - const [tab, setTab] = useState(Tabs.common); const { styles, theme } = useStyles(); const { t } = useTranslation('setting'); + const [tab, setTab] = useGlobalStore((s) => [s.settingsTab, s.switchSettingTabs]); + return (
@@ -78,10 +77,11 @@ const Settings = memo(() => { setTab(e as Tabs)} + onChange={(e) => setTab(e as any)} />
@@ -91,8 +91,9 @@ const Settings = memo(() => {
{`${pkg.version}`}
- {tab === Tabs.common && } - {tab === Tabs.agent && } + {tab === 'common' && } + {tab === 'llm' && } + {tab === 'agent' && }
); }); diff --git a/src/services/_header.ts b/src/services/_header.ts new file mode 100644 index 000000000000..82d79a4f587b --- /dev/null +++ b/src/services/_header.ts @@ -0,0 +1,33 @@ +import { + AZURE_OPENAI_API_VERSION, + LOBE_CHAT_ACCESS_CODE, + OPENAI_API_KEY_HEADER_KEY, + OPENAI_END_POINT, + USE_AZURE_OPENAI, +} from '@/const/fetch'; +import { useGlobalStore } from '@/store/global'; + +// eslint-disable-next-line no-undef +export const createHeaderWithOpenAI = (header?: HeadersInit): HeadersInit => { + const openai = useGlobalStore.getState().settings.languageModel.openAI; + + const apiKey = openai.OPENAI_API_KEY || useGlobalStore.getState().settings.OPENAI_API_KEY || ''; + const endpoint = openai.endpoint || useGlobalStore.getState().settings.endpoint || ''; + + // eslint-disable-next-line no-undef + const result: HeadersInit = { + ...header, + [LOBE_CHAT_ACCESS_CODE]: useGlobalStore.getState().settings.password || '', + [OPENAI_API_KEY_HEADER_KEY]: apiKey, + [OPENAI_END_POINT]: endpoint, + }; + + if (openai.useAzure) { + Object.assign(result, { + [AZURE_OPENAI_API_VERSION]: openai.azureApiVersion, + [USE_AZURE_OPENAI]: '1', + }); + } + + return result; +}; diff --git a/src/services/_url.ts b/src/services/_url.ts new file mode 100644 index 000000000000..6b49a94acb98 --- /dev/null +++ b/src/services/_url.ts @@ -0,0 +1,17 @@ +const isDev = process.env.NODE_ENV === 'development'; + +const prefix = isDev ? 
'-dev' : ''; + +export const URLS = { + plugins: '/api/plugins' + prefix, +}; + +export const OPENAI_URLS = { + chat: '/api/openai/chat' + prefix, + models: '/api/openai/models' + prefix, +}; + +export const AZURE_OPENAI_URLS = { + chat: '/api/azure-openai/chat' + prefix, + models: '/api/azure-openai/models' + prefix, +}; diff --git a/src/services/chatModel.ts b/src/services/chatModel.ts index e371aea71fe5..9df970e336fa 100644 --- a/src/services/chatModel.ts +++ b/src/services/chatModel.ts @@ -1,12 +1,11 @@ import { merge } from 'lodash-es'; -import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY, OPENAI_END_POINT } from '@/const/fetch'; -import { useGlobalStore } from '@/store/global'; import { pluginSelectors, usePluginStore } from '@/store/plugin'; import { initialLobeAgentConfig } from '@/store/session/initialState'; import type { ChatCompletionFunctions, OpenAIStreamPayload } from '@/types/openai'; -import { URLS } from './url'; +import { createHeaderWithOpenAI } from './_header'; +import { OPENAI_URLS } from './_url'; interface FetchChatModelOptions { signal?: AbortSignal | undefined; @@ -35,14 +34,9 @@ export const fetchChatModel = ( const functions = filterFunctions.length === 0 ? 
undefined : filterFunctions; - return fetch(URLS.openai, { + return fetch(OPENAI_URLS.chat, { body: JSON.stringify({ ...payload, functions }), - headers: { - 'Content-Type': 'application/json', - [LOBE_CHAT_ACCESS_CODE]: useGlobalStore.getState().settings.password || '', - [OPENAI_API_KEY_HEADER_KEY]: useGlobalStore.getState().settings.OPENAI_API_KEY || '', - [OPENAI_END_POINT]: useGlobalStore.getState().settings.endpoint || '', - }, + headers: createHeaderWithOpenAI({ 'Content-Type': 'application/json' }), method: 'POST', signal: options?.signal, }); diff --git a/src/services/modelList.ts b/src/services/modelList.ts new file mode 100644 index 000000000000..b91a0d9934c8 --- /dev/null +++ b/src/services/modelList.ts @@ -0,0 +1,15 @@ +import { LanguageModelWhiteList } from '@/const/llm'; + +import { createHeaderWithOpenAI } from './_header'; +import { OPENAI_URLS } from './_url'; + +export const fetchModelList = async (): Promise => { + const res = await fetch(OPENAI_URLS.models, { + headers: createHeaderWithOpenAI(), + method: 'POST', + }); + + const modelList: string[] = await res.json(); + + return LanguageModelWhiteList.filter((i) => modelList.includes(i)); +}; diff --git a/src/services/plugin.ts b/src/services/plugin.ts index f0c0054943b7..8e01d2f44eff 100644 --- a/src/services/plugin.ts +++ b/src/services/plugin.ts @@ -3,7 +3,7 @@ import { PluginRequestPayload, createHeadersWithPluginSettings } from '@lobehub/ import { pluginSelectors, usePluginStore } from '@/store/plugin'; import { getMessageError } from '@/utils/fetch'; -import { URLS } from './url'; +import { URLS } from './_url'; interface FetchChatModelOptions { signal?: AbortSignal | undefined; diff --git a/src/services/pluginMarket.ts b/src/services/pluginMarket.ts index 9f992845fd87..e93f130e026d 100644 --- a/src/services/pluginMarket.ts +++ b/src/services/pluginMarket.ts @@ -2,7 +2,7 @@ import { getPluginIndexJSON } from '@/const/url'; import { useGlobalStore } from '@/store/global'; /** - * 请求插件列表 + 
* fetch Plugin Market List */ export const getPluginList = async () => { const url = getPluginIndexJSON(useGlobalStore.getState().settings.language); diff --git a/src/services/url.ts b/src/services/url.ts deleted file mode 100644 index f18b774bd92c..000000000000 --- a/src/services/url.ts +++ /dev/null @@ -1,8 +0,0 @@ -const isDev = process.env.NODE_ENV === 'development'; - -const prefix = isDev ? '-dev' : ''; - -export const URLS = { - openai: '/api/openai' + prefix, - plugins: '/api/plugins' + prefix, -}; diff --git a/src/store/global/hooks/index.ts b/src/store/global/hooks/index.ts index 818f44eefc5f..8d8dc9d9ae44 100644 --- a/src/store/global/hooks/index.ts +++ b/src/store/global/hooks/index.ts @@ -1,3 +1,4 @@ +export * from './useEffectAfterHydrated'; export * from './useHydrated'; export * from './useOnFinishHydrationGlobal'; export * from './useSwitchSideBarOnInit'; diff --git a/src/store/global/hooks/useEffectAfterHydrated.ts b/src/store/global/hooks/useEffectAfterHydrated.ts new file mode 100644 index 000000000000..8cb9caefb4d0 --- /dev/null +++ b/src/store/global/hooks/useEffectAfterHydrated.ts @@ -0,0 +1,22 @@ +import { useEffect } from 'react'; + +import { useGlobalStore } from '../store'; + +export const useEffectAfterGlobalHydrated = ( + fn: (store: typeof useGlobalStore) => void, + deps: any[] = [], +) => { + useEffect(() => { + const hasRehydrated = useGlobalStore.persist.hasHydrated(); + + if (hasRehydrated) { + // 等价 useEffect 多次触发 + fn(useGlobalStore); + } else { + // 等价于 useEffect 第一次触发 + useGlobalStore.persist.onFinishHydration(() => { + fn(useGlobalStore); + }); + } + }, deps); +}; diff --git a/src/store/global/initialState.ts b/src/store/global/initialState.ts index 896fd165fb27..42d0c1239b03 100644 --- a/src/store/global/initialState.ts +++ b/src/store/global/initialState.ts @@ -3,6 +3,8 @@ import type { GlobalSettings } from '@/types/settings'; export type SidebarTabKey = 'chat' | 'market' | 'settings'; +export type SettingsTabs = 'agent' | 
'common' | 'llm'; + export interface Guide { // Topic 引导 topic?: boolean; @@ -19,6 +21,7 @@ export interface GlobalState { * 用户设置 */ settings: GlobalSettings; + settingsTab: SettingsTabs; sidebarKey: SidebarTabKey; } @@ -41,5 +44,6 @@ export const initialState: GlobalState = { showSessionPanel: true, }, settings: DEFAULT_SETTINGS, + settingsTab: 'common', sidebarKey: 'chat', }; diff --git a/src/store/global/selectors.test.ts b/src/store/global/selectors.test.ts new file mode 100644 index 000000000000..4979b981ec38 --- /dev/null +++ b/src/store/global/selectors.test.ts @@ -0,0 +1,264 @@ +import { LanguageModel } from '@/types/llm'; + +import { globalSelectors } from './selectors'; +import { GlobalStore } from './store'; + +describe('globalSelectors', () => { + describe('currentSettings', () => { + it('should merge DEFAULT_SETTINGS and s.settings correctly', () => { + const s = { + settings: { + avatar: 'avatar.jpg', + fontSize: 14, + language: 'en-US', + neutralColor: 'sand', + password: 'password123', + primaryColor: 'blue', + themeMode: 'light', + defaultAgent: { + config: { + systemRole: '', + model: LanguageModel.GPT3_5, + params: {}, + }, + meta: { + avatar: 'Default Agent', + description: 'Default agent for testing', + }, + }, + languageModel: { + openAI: { + OPENAI_API_KEY: 'openai-api-key', + endpoint: 'https://openai-endpoint.com', + models: ['gpt-3.5-turbo'], + }, + }, + }, + } as unknown as GlobalStore; + + const result = globalSelectors.currentSettings(s); + + expect(result).toEqual({ + avatar: 'avatar.jpg', + fontSize: 14, + language: 'en-US', + neutralColor: 'sand', + password: 'password123', + primaryColor: 'blue', + themeMode: 'light', + defaultAgent: { + config: { + displayMode: 'chat', + historyCount: 1, + systemRole: '', + model: LanguageModel.GPT3_5, + params: { + frequency_penalty: 0, + presence_penalty: 0, + temperature: 0.6, + top_p: 1, + }, + plugins: [], + }, + meta: { + avatar: 'Default Agent', + description: 'Default agent for testing', + 
}, + }, + languageModel: { + openAI: { + OPENAI_API_KEY: 'openai-api-key', + azureApiVersion: '2023-08-01-preview', + endpoint: 'https://openai-endpoint.com', + models: ['gpt-3.5-turbo'], + }, + }, + }); + }); + }); + // + // describe('defaultAgent', () => { + // it('should merge DEFAULT_AGENT and s.settings.defaultAgent correctly', () => { + // const s: GlobalStore = { + // settings: { + // defaultAgent: { + // config: { + // model: 'gpt-3.5-turbo', + // maxTokens: 100, + // }, + // meta: { + // name: 'Default Agent', + // description: 'Default agent for testing', + // }, + // }, + // }, + // }; + // + // const result = globalSelectors.defaultAgent(s); + // + // + // + // expect(result).toEqual(expected); + // }); + // }); + // + // describe('defaultAgentConfig', () => { + // it('should merge DEFAULT_AGENT_CONFIG and defaultAgent(s).config correctly', () => { + // const s: GlobalStore = { + // settings: { + // defaultAgent: { + // config: { + // model: 'gpt-3.5-turbo', + // maxTokens: 100, + // }, + // }, + // }, + // }; + // + // const result = globalSelectors.defaultAgentConfig(s); + // + // const defaultAgent = globalSelectors.defaultAgent(s); + // const expected = merge({}, DEFAULT_AGENT_CONFIG, defaultAgent.config); + // + // expect(result).toEqual(expected); + // }); + // }); + // + // describe('defaultAgentMeta', () => { + // it('should merge DEFAULT_AGENT_META and defaultAgent(s).meta correctly', () => { + // const s: GlobalStore = { + // settings: { + // defaultAgent: { + // meta: { + // name: 'Default Agent', + // description: 'Default agent for testing', + // }, + // }, + // }, + // }; + // + // const result = globalSelectors.defaultAgentMeta(s); + // + // const defaultAgent = globalSelectors.defaultAgent(s); + // const expected = merge({}, DEFAULT_AGENT_META, defaultAgent.meta); + // + // expect(result).toEqual(expected); + // }); + // }); + // + // describe('exportSettings', () => { + // it('should remove OPENAI_API_KEY and password fields from 
s.settings', () => { + // const s: GlobalStore = { + // settings: { + // OPENAI_API_KEY: 'openai-api-key', + // password: 'password123', + // avatar: 'avatar.jpg', + // fontSize: 14, + // language: 'en', + // neutralColor: 'white', + // primaryColor: 'blue', + // themeMode: 'light', + // defaultAgent: { + // config: { + // model: 'gpt-3.5-turbo', + // maxTokens: 100, + // }, + // meta: { + // name: 'Default Agent', + // description: 'Default agent for testing', + // }, + // }, + // languageModel: { + // azureOpenAI: { + // AZURE_API_KEY: 'azure-api-key', + // apiVersion: 'v1', + // endpoint: 'https://azure-openai-endpoint.com', + // models: ['gpt-3.5-turbo'], + // }, + // openAI: { + // OPENAI_API_KEY: 'openai-api-key', + // endpoint: 'https://openai-endpoint.com', + // models: ['gpt-3.5-turbo'], + // }, + // }, + // }, + // }; + // + // const result = globalSelectors.exportSettings(s); + // + // const expected = { + // avatar: 'avatar.jpg', + // fontSize: 14, + // language: 'en', + // neutralColor: 'white', + // primaryColor: 'blue', + // themeMode: 'light', + // defaultAgent: { + // config: { + // model: 'gpt-3.5-turbo', + // maxTokens: 100, + // }, + // meta: { + // name: 'Default Agent', + // description: 'Default agent for testing', + // }, + // }, + // languageModel: { + // azureOpenAI: { + // AZURE_API_KEY: 'azure-api-key', + // apiVersion: 'v1', + // endpoint: 'https://azure-openai-endpoint.com', + // models: ['gpt-3.5-turbo'], + // }, + // openAI: { + // endpoint: 'https://openai-endpoint.com', + // models: ['gpt-3.5-turbo'], + // }, + // }, + // }; + // + // expect(result).toEqual(expected); + // }); + // + // it('should return the result as GlobalSettings type', () => { + // const s: GlobalStore = { + // settings: { + // avatar: 'avatar.jpg', + // fontSize: 14, + // language: 'en', + // neutralColor: 'white', + // password: 'password123', + // primaryColor: 'blue', + // themeMode: 'light', + // defaultAgent: { + // config: { + // model: 'gpt-3.5-turbo', 
+ // maxTokens: 100, + // }, + // meta: { + // name: 'Default Agent', + // description: 'Default agent for testing', + // }, + // }, + // languageModel: { + // azureOpenAI: { + // AZURE_API_KEY: 'azure-api-key', + // apiVersion: 'v1', + // endpoint: 'https://azure-openai-endpoint.com', + // models: ['gpt-3.5-turbo'], + // }, + // openAI: { + // OPENAI_API_KEY: 'openai-api-key', + // endpoint: 'https://openai-endpoint.com', + // models: ['gpt-3.5-turbo'], + // }, + // }, + // }, + // }; + // + // const result = globalSelectors.exportSettings(s); + // + // expect(result).toBeInstanceOf(GlobalSettings); + // }); + // }); +}); diff --git a/src/store/global/selectors.ts b/src/store/global/selectors.ts index fe61110d9681..bff03853efce 100644 --- a/src/store/global/selectors.ts +++ b/src/store/global/selectors.ts @@ -5,14 +5,13 @@ import { merge } from '@/utils/merge'; import { GlobalStore } from './store'; -const currentSettings = (s: GlobalStore) => merge({}, DEFAULT_SETTINGS, s.settings); +const currentSettings = (s: GlobalStore) => merge(DEFAULT_SETTINGS, s.settings); -const defaultAgent = (s: GlobalStore) => merge({}, DEFAULT_AGENT, s.settings.defaultAgent); +const defaultAgent = (s: GlobalStore) => merge(DEFAULT_AGENT, s.settings.defaultAgent); -const defaultAgentConfig = (s: GlobalStore) => - merge({}, DEFAULT_AGENT_CONFIG, defaultAgent(s).config); +const defaultAgentConfig = (s: GlobalStore) => merge(DEFAULT_AGENT_CONFIG, defaultAgent(s).config); -const defaultAgentMeta = (s: GlobalStore) => merge({}, DEFAULT_AGENT_META, defaultAgent(s).meta); +const defaultAgentMeta = (s: GlobalStore) => merge(DEFAULT_AGENT_META, defaultAgent(s).meta); export const exportSettings = (s: GlobalStore) => { // eslint-disable-next-line @typescript-eslint/no-unused-vars diff --git a/src/store/global/slices/settings.ts b/src/store/global/slices/settings.ts index 6ee674b6ca61..c39e6977b50b 100644 --- a/src/store/global/slices/settings.ts +++ b/src/store/global/slices/settings.ts @@ -1,10 
+1,13 @@ import { ThemeMode } from 'antd-style'; +import isEqual from 'fast-deep-equal'; import { produce } from 'immer'; -import { merge } from 'lodash-es'; +import { DeepPartial } from 'utility-types'; import type { StateCreator } from 'zustand/vanilla'; import { DEFAULT_AGENT, DEFAULT_SETTINGS } from '@/const/settings'; +import { SettingsTabs } from '@/store/global/initialState'; import type { GlobalSettings } from '@/types/settings'; +import { merge } from '@/utils/merge'; import { setNamespace } from '@/utils/storeDebug'; import type { GlobalStore } from '../store'; @@ -24,7 +27,8 @@ export interface SettingsAction { * 设置部分配置设置 * @param settings - 部分配置设置 */ - setSettings: (settings: Partial) => void; + setSettings: (settings: DeepPartial) => void; + switchSettingTabs: (tab: SettingsTabs) => void; /** * 设置主题模式 * @param themeMode - 主题模式 @@ -58,11 +62,19 @@ export const createSettingsSlice: StateCreator< resetSettings: () => { set({ settings: DEFAULT_SETTINGS }, false, t('resetSettings')); }, - setSettings: (settings) => { const oldSetting = get().settings; - set({ settings: merge({}, oldSetting, settings) }, false, t('setSettings', settings)); + const nextSettings = merge(oldSetting, settings); + + if (isEqual(oldSetting, nextSettings)) return; + + set({ settings: merge(oldSetting, settings) }, false, t('setSettings', settings)); }, + + switchSettingTabs: (tab) => { + set({ settingsTab: tab }); + }, + switchThemeMode: (themeMode) => { get().setSettings({ themeMode }); }, diff --git a/src/store/global/store.ts b/src/store/global/store.ts index df536c9f06a1..99fdd498ffa5 100644 --- a/src/store/global/store.ts +++ b/src/store/global/store.ts @@ -4,9 +4,10 @@ import { shallow } from 'zustand/shallow'; import { createWithEqualityFn } from 'zustand/traditional'; import { StateCreator } from 'zustand/vanilla'; -import { DEFAULT_AGENT } from '@/const/settings'; +import { DEFAULT_AGENT, DEFAULT_LLM_CONFIG } from '@/const/settings'; import { isDev } from '@/utils/env'; 
+import { createHyperStorage } from '../middleware/createHyperStorage'; import { type GlobalState, initialState } from './initialState'; import { type AgentAction, createAgentSlice } from './slices/agent'; import { type CommonAction, createCommonSlice } from './slices/common'; @@ -29,6 +30,7 @@ type GlobalPersist = Pick; const persistOptions: PersistOptions = { merge: (persistedState, currentState) => { const state = persistedState as GlobalPersist; + return { ...currentState, ...state, @@ -36,15 +38,42 @@ if (!draft.defaultAgent) { draft.defaultAgent = DEFAULT_AGENT; } + delete draft.enableMaxTokens; + delete draft.enableHistoryCount; + delete draft.historyCount; + delete draft.enableCompressThreshold; + delete draft.compressThreshold; + + // migration to new data model + if (!draft.languageModel) { + draft.languageModel = { + openAI: { + ...DEFAULT_LLM_CONFIG.openAI, + OPENAI_API_KEY: draft.OPENAI_API_KEY || DEFAULT_LLM_CONFIG.openAI.OPENAI_API_KEY, + endpoint: draft.endpoint || DEFAULT_LLM_CONFIG.openAI.endpoint, + }, + }; + + delete draft.OPENAI_API_KEY; + delete draft.endpoint; + } }), }; }, name: 'LOBE_SETTINGS', - partialize: (s) => ({ - preference: s.preference, - settings: s.settings, - }), + skipHydration: true, + + storage: createHyperStorage({ + localStorage: { + dbName: 'LobeHub', + selectors: ['preference', 'settings'], + }, + url: { + mode: 'hash', + selectors: [{ settingsTab: 'tab' }], + }, + }), }; // =============== 实装 useStore ============ // diff --git a/src/types/settings.ts b/src/types/settings.ts index dc935ec445c6..0570d13ad67a 100644 --- a/src/types/settings.ts +++ b/src/types/settings.ts @@ -5,15 +5,37 @@ import type { Locales } from '@/locales/resources'; import type { LobeAgentSession } from '@/types/session'; export interface GlobalBaseSettings { - OPENAI_API_KEY: string; + /** + * @deprecated + */ + OPENAI_API_KEY?: string; + avatar: string; - compressThreshold: number; - 
enableCompressThreshold: boolean; - enableHistoryCount: boolean; - enableMaxTokens: boolean; - endpoint: string; + /** + * @deprecated + */ + compressThreshold?: number; + /** + * @deprecated + */ + enableCompressThreshold?: boolean; + /** + * @deprecated + */ + enableHistoryCount?: boolean; + /** + * @deprecated + */ + enableMaxTokens?: boolean; + /** + * @deprecated + */ + endpoint?: string; fontSize: number; - historyCount: number; + /** + * @deprecated + */ + historyCount?: number; language: Locales; neutralColor: NeutralColors | ''; password: string; @@ -21,13 +43,28 @@ export interface GlobalBaseSettings { themeMode: ThemeMode; } -export type GlobalDefaultAgent = Partial; +export type GlobalDefaultAgent = Pick; + +interface OpenAIConfig { + OPENAI_API_KEY: string; + azureApiVersion?: string; + endpoint?: string; + models?: string[]; + useAzure?: boolean; +} + +export type GlobalLLMConfig = { + openAI: OpenAIConfig; +}; + +export type LLMBrand = keyof GlobalLLMConfig; /** * 配置设置 */ export interface GlobalSettings extends GlobalBaseSettings { defaultAgent: GlobalDefaultAgent; + languageModel: GlobalLLMConfig; } export type ConfigKeys = keyof GlobalSettings; diff --git a/src/utils/fetch.ts b/src/utils/fetch.ts index f65def52d055..02121183596c 100644 --- a/src/utils/fetch.ts +++ b/src/utils/fetch.ts @@ -74,7 +74,7 @@ interface FetchAITaskResultParams { /** * 错误处理函数 */ - onError?: (e: Error) => void; + onError?: (e: Error, rawError?: any) => void; /** * 加载状态变化处理函数 * @param loading - 是否处于加载状态 @@ -101,27 +101,19 @@ export const fetchAIFactory = onLoadingChange, abortController, }: FetchAITaskResultParams) => { - const errorHandle = (error: Error) => { + const errorHandle = (error: Error, errorContent?: any) => { onLoadingChange?.(false); if (abortController?.signal.aborted) { - // notification.primaryInfo({ - // message: '已中断当前节点的执行任务', - // }); return; } - - // notification?.error({ - // message: `请求失败(${error.message})`, - // placement: 'bottomRight', - // }); - 
onError?.(error); + onError?.(error, errorContent); }; onLoadingChange?.(true); const data = await fetchSSE(() => fetcher(params, { signal: abortController?.signal }), { onErrorHandle: (error) => { - errorHandle(new Error(error.message)); + errorHandle(new Error(error.message), error); }, onMessageHandle, }).catch(errorHandle);