diff --git a/next.config.mjs b/next.config.mjs
index 6aece5e24ba7..7172072c291a 100644
--- a/next.config.mjs
+++ b/next.config.mjs
@@ -27,8 +27,12 @@ const nextConfig = {
async rewrites() {
return [
{
- source: '/api/openai-dev',
- destination: `${API_END_PORT_URL}/api/openai`,
+ source: '/api/openai/chat-dev',
+ destination: `${API_END_PORT_URL}/api/openai/chat`,
+ },
+ {
+ source: '/api/openai/models-dev',
+ destination: `${API_END_PORT_URL}/api/openai/models`,
},
{
source: '/api/plugins-dev',
diff --git a/package.json b/package.json
index 0e9a57f4266e..9de0ebac2271 100644
--- a/package.json
+++ b/package.json
@@ -65,11 +65,12 @@
"dependencies": {
"@ant-design/colors": "^7",
"@ant-design/icons": "^5",
+ "@azure/openai": "latest",
"@emoji-mart/data": "^1",
"@emoji-mart/react": "^1",
"@icons-pack/react-simple-icons": "^9",
- "@lobehub/chat-plugin-sdk": "^1.17.0",
- "@lobehub/chat-plugins-gateway": "^1.5.0",
+ "@lobehub/chat-plugin-sdk": "^1.17.7",
+ "@lobehub/chat-plugins-gateway": "^1.5.1",
"@lobehub/ui": "latest",
"@vercel/analytics": "^1",
"ahooks": "^3",
@@ -96,11 +97,11 @@
"react-i18next": "^13",
"react-intersection-observer": "^9",
"react-layout-kit": "^1.7.1",
- "serpapi": "^2",
"swr": "^2",
"systemjs": "^6.14.2",
"ts-md5": "^1",
"use-merge-value": "^1",
+ "utility-types": "^3",
"uuid": "^9",
"zustand": "^4.4",
"zustand-utils": "^1"
@@ -142,12 +143,6 @@
"typescript": "^5",
"vitest": "latest"
},
- "peerDependencies": {
- "antd": ">=5",
- "antd-style": ">=3",
- "react": ">=18",
- "react-dom": ">=18"
- },
"publishConfig": {
"access": "public",
"registry": "https://registry.npmjs.org"
diff --git a/src/config/server.ts b/src/config/server.ts
index bc88c14d44bf..3007931591a4 100644
--- a/src/config/server.ts
+++ b/src/config/server.ts
@@ -3,6 +3,7 @@ declare global {
namespace NodeJS {
interface ProcessEnv {
ACCESS_CODE?: string;
+ AZURE_API_KEY?: string;
OPENAI_API_KEY?: string;
OPENAI_PROXY_URL?: string;
}
@@ -16,6 +17,7 @@ export const getServerConfig = () => {
return {
ACCESS_CODE: process.env.ACCESS_CODE,
+ AZURE_API_KEY: process.env.AZURE_API_KEY,
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
OPENAI_PROXY_URL: process.env.OPENAI_PROXY_URL,
};
diff --git a/src/const/fetch.ts b/src/const/fetch.ts
index 1155fad4dd42..334b5a3870fb 100644
--- a/src/const/fetch.ts
+++ b/src/const/fetch.ts
@@ -1,7 +1,20 @@
export const OPENAI_END_POINT = 'X-OPENAI-END_POINT';
-
export const OPENAI_API_KEY_HEADER_KEY = 'X-OPENAI-API-KEY';
+export const USE_AZURE_OPENAI = 'X-USE_AZURE_OPENAI';
+
+export const AZURE_OPENAI_API_VERSION = 'X-AZURE_OPENAI_API_VERSION';
+
export const LOBE_CHAT_ACCESS_CODE = 'X-LOBE_CHAT_ACCESS_CODE';
-export const LOBE_PLUGIN_SETTINGS = 'X-LOBE_PLUGIN_SETTINGS';
+export const getOpenAIAuthFromRequest = (req: Request) => {
+ const apiKey = req.headers.get(OPENAI_API_KEY_HEADER_KEY);
+ const endpoint = req.headers.get(OPENAI_END_POINT);
+ const accessCode = req.headers.get(LOBE_CHAT_ACCESS_CODE);
+ const useAzureStr = req.headers.get(USE_AZURE_OPENAI);
+ const apiVersion = req.headers.get(AZURE_OPENAI_API_VERSION);
+
+ const useAzure = !!useAzureStr;
+
+ return { accessCode, apiKey, apiVersion, endpoint, useAzure };
+};
diff --git a/src/const/llm.ts b/src/const/llm.ts
new file mode 100644
index 000000000000..da849775d93f
--- /dev/null
+++ b/src/const/llm.ts
@@ -0,0 +1,17 @@
+/**
+ * A white list of language models that are allowed to display and be used in the app.
+ */
+export const LanguageModelWhiteList = [
+ // OpenAI
+ 'gpt-3.5-turbo',
+ 'gpt-3.5-turbo-16k',
+ 'gpt-4',
+ 'gpt-4-32k',
+];
+
+export const DEFAULT_OPENAI_MODEL_LIST = [
+ 'gpt-3.5-turbo',
+ 'gpt-3.5-turbo-16k',
+ 'gpt-4',
+ 'gpt-4-32k',
+];
diff --git a/src/const/settings.ts b/src/const/settings.ts
index f12d81481e07..32be379d93c7 100644
--- a/src/const/settings.ts
+++ b/src/const/settings.ts
@@ -1,18 +1,17 @@
+import { DEFAULT_OPENAI_MODEL_LIST } from '@/const/llm';
import { DEFAULT_AGENT_META } from '@/const/meta';
import { LanguageModel } from '@/types/llm';
import { LobeAgentConfig } from '@/types/session';
-import { GlobalBaseSettings, GlobalDefaultAgent, GlobalSettings } from '@/types/settings';
+import {
+ GlobalBaseSettings,
+ GlobalDefaultAgent,
+ GlobalLLMConfig,
+ GlobalSettings,
+} from '@/types/settings';
export const DEFAULT_BASE_SETTINGS: GlobalBaseSettings = {
- OPENAI_API_KEY: '',
avatar: '',
- compressThreshold: 24,
- enableCompressThreshold: false,
- enableHistoryCount: false,
- enableMaxTokens: true,
- endpoint: '',
fontSize: 14,
- historyCount: 24,
language: 'zh-CN',
neutralColor: '',
password: '',
@@ -34,6 +33,14 @@ export const DEFAULT_AGENT_CONFIG: LobeAgentConfig = {
systemRole: '',
};
+export const DEFAULT_LLM_CONFIG: GlobalLLMConfig = {
+ openAI: {
+ OPENAI_API_KEY: '',
+ azureApiVersion: '2023-08-01-preview',
+ models: DEFAULT_OPENAI_MODEL_LIST,
+ },
+};
+
export const DEFAULT_AGENT: GlobalDefaultAgent = {
config: DEFAULT_AGENT_CONFIG,
meta: DEFAULT_AGENT_META,
@@ -41,5 +48,6 @@ export const DEFAULT_AGENT: GlobalDefaultAgent = {
export const DEFAULT_SETTINGS: GlobalSettings = {
defaultAgent: DEFAULT_AGENT,
+ languageModel: DEFAULT_LLM_CONFIG,
...DEFAULT_BASE_SETTINGS,
};
diff --git a/src/locales/default/setting.ts b/src/locales/default/setting.ts
index feeb4db43858..1863cb3f7075 100644
--- a/src/locales/default/setting.ts
+++ b/src/locales/default/setting.ts
@@ -20,6 +20,64 @@ export default {
session: '会话设置',
sessionWithName: '会话设置 · {{name}}',
},
+ llm: {
+ AzureOpenAI: {
+ endpoint: {
+ desc: '从 Azure 门户检查资源时,可在“密钥和终结点”部分中找到此值',
+ placeholder: 'https://docs-test-001.openai.azure.com',
+ title: 'Azure API 地址',
+ },
+ models: {
+ desc: '支持的模型',
+ title: '模型列表',
+ },
+ title: 'Azure OpenAI 设置',
+ token: {
+ desc: '从 Azure 门户检查资源时,可在“密钥和终结点”部分中找到此值。 可以使用 KEY1 或 KEY2',
+ placeholder: 'Azure API Key',
+ title: 'API Key',
+ },
+ },
+ OpenAI: {
+ azureApiVersion: {
+ desc: 'Azure 的 API 版本,遵循 YYYY-MM-DD 格式,查阅[最新版本](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/reference#chat-completions)',
+ fetch: '获取列表',
+        title: 'Azure API Version',
+ },
+ check: {
+ button: '检查',
+ desc: '测试 Api Key 与代理地址是否正确填写',
+ pass: '检查通过',
+ title: '连通性检查',
+ },
+ endpoint: {
+ desc: '除默认地址外,必须包含 http(s)://',
+ placeholder: 'https://api.openai.com/v1',
+ title: '接口代理地址',
+ },
+ models: {
+ count: '共支持 {{count}} 个模型',
+ desc: '支持的模型',
+ fetch: '获取模型列表',
+ notSupport: 'Azure OpenAI 暂不支持查看模型列表',
+ notSupportTip: '你需要自行确保部署名称与模型名称一致',
+ refetch: '重新获取模型列表',
+ title: '模型列表',
+ },
+ title: 'OpenAI 设置',
+ token: {
+ desc: '使用自己的 OpenAI Key',
+ placeholder: 'OpenAI API Key',
+ title: 'API Key',
+ },
+ useAzure: {
+ desc: '使用 Azure 提供的 OpenAI 服务',
+ fetch: '获取列表',
+ title: 'Azure OpenAI',
+ },
+ },
+    waitingForMore: '更多模型正在 <1>计划接入</1> 中,敬请期待 ✨',
+ },
settingAgent: {
avatar: {
title: '头像',
@@ -114,19 +172,6 @@ export default {
title: '核采样',
},
},
- settingOpenAI: {
- endpoint: {
- desc: '除默认地址外,必须包含 http(s)://',
- placeholder: 'https://api.openai.com/v1',
- title: '接口代理地址',
- },
- title: 'OpenAI 设置',
- token: {
- desc: '使用自己的 OpenAI Key',
- placeholder: 'OpenAI API Key',
- title: 'API Key',
- },
- },
settingPlugin: {
add: '添加',
addTooltip: '添加自定义插件',
@@ -173,5 +218,6 @@ export default {
tab: {
agent: '默认助手',
common: '通用设置',
+ llm: '语言模型',
},
};
diff --git a/src/pages/api/openai.ts b/src/pages/api/createChatCompletion.ts
similarity index 50%
rename from src/pages/api/openai.ts
rename to src/pages/api/createChatCompletion.ts
index c4121432d9e6..7b5732be5153 100644
--- a/src/pages/api/openai.ts
+++ b/src/pages/api/createChatCompletion.ts
@@ -1,46 +1,17 @@
import { OpenAIStream, StreamingTextResponse } from 'ai';
-import OpenAI, { ClientOptions } from 'openai';
+import OpenAI from 'openai';
-import { getServerConfig } from '@/config/server';
import { createErrorResponse } from '@/pages/api/error';
import { ChatErrorType } from '@/types/fetch';
import { OpenAIStreamPayload } from '@/types/openai';
-// 创建 OpenAI 实例
-export const createOpenAI = (userApiKey: string | null, endpoint?: string | null) => {
- const { OPENAI_API_KEY, OPENAI_PROXY_URL } = getServerConfig();
-
- const baseURL = endpoint ? endpoint : OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined;
-
- const config: ClientOptions = {
- apiKey: !userApiKey ? OPENAI_API_KEY : userApiKey,
- };
-
- // a bug with openai: https://github.com/openai/openai-node/issues/283
- // TODO: should refactor when openai fix the bug
- if (baseURL) {
- config.baseURL = baseURL;
- }
-
- return new OpenAI(config);
-};
-
interface CreateChatCompletionOptions {
- OPENAI_API_KEY: string | null;
- endpoint?: string | null;
+ openai: OpenAI;
payload: OpenAIStreamPayload;
}
-export const createChatCompletion = async ({
- payload,
- OPENAI_API_KEY,
- endpoint,
-}: CreateChatCompletionOptions) => {
- // ============ 0.创建 OpenAI 实例 ============ //
-
- const openai = createOpenAI(OPENAI_API_KEY, endpoint);
-
- // ============ 1. 前置处理 messages ============ //
+export const createChatCompletion = async ({ payload, openai }: CreateChatCompletionOptions) => {
+ // ============ 1. preprocess messages ============ //
const { messages, ...params } = payload;
const formatMessages = messages.map((m) => ({
@@ -49,7 +20,7 @@ export const createChatCompletion = async ({
role: m.role,
}));
- // ============ 2. 发送请求 ============ //
+ // ============ 2. send api ============ //
try {
const response = await openai.chat.completions.create({
@@ -63,7 +34,7 @@ export const createChatCompletion = async ({
// Check if the error is an OpenAI APIError
if (error instanceof OpenAI.APIError) {
return createErrorResponse(ChatErrorType.OpenAIBizError, {
- endpoint: !!endpoint ? endpoint : undefined,
+ endpoint: openai.baseURL,
error: error.error ?? error.cause,
});
}
@@ -73,7 +44,7 @@ export const createChatCompletion = async ({
// return as a GatewayTimeout error
return createErrorResponse(ChatErrorType.InternalServerError, {
- endpoint,
+ endpoint: openai.baseURL,
error: JSON.stringify(error),
});
}
diff --git a/src/pages/api/openai.api.ts b/src/pages/api/openai.api.ts
deleted file mode 100644
index 2390cd9e0cab..000000000000
--- a/src/pages/api/openai.api.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY, OPENAI_END_POINT } from '@/const/fetch';
-import { ErrorType } from '@/types/fetch';
-import { OpenAIStreamPayload } from '@/types/openai';
-
-import { checkAuth } from './auth';
-import { createErrorResponse } from './error';
-import { createChatCompletion } from './openai';
-
-export const runtime = 'edge';
-
-export default async function handler(req: Request) {
- const payload = (await req.json()) as OpenAIStreamPayload;
- const apiKey = req.headers.get(OPENAI_API_KEY_HEADER_KEY);
- const accessCode = req.headers.get(LOBE_CHAT_ACCESS_CODE);
- const endpoint = req.headers.get(OPENAI_END_POINT);
-
- const result = checkAuth({ accessCode, apiKey });
-
- if (!result.auth) {
- return createErrorResponse(result.error as ErrorType);
- }
-
- return createChatCompletion({ OPENAI_API_KEY: apiKey, endpoint, payload });
-}
diff --git a/src/pages/api/openai/chat.api.ts b/src/pages/api/openai/chat.api.ts
new file mode 100644
index 000000000000..d64ae7bb8e4e
--- /dev/null
+++ b/src/pages/api/openai/chat.api.ts
@@ -0,0 +1,43 @@
+import OpenAI from 'openai';
+
+import { getOpenAIAuthFromRequest } from '@/const/fetch';
+import { ChatErrorType, ErrorType } from '@/types/fetch';
+import { OpenAIStreamPayload } from '@/types/openai';
+
+import { checkAuth } from '../auth';
+import { createChatCompletion } from '../createChatCompletion';
+import { createErrorResponse } from '../error';
+import { createAzureOpenai } from './createAzureOpenai';
+import { createOpenai } from './createOpenai';
+
+export const runtime = 'edge';
+
+export default async function handler(req: Request) {
+ const payload = (await req.json()) as OpenAIStreamPayload;
+
+ const { apiKey, accessCode, endpoint, useAzure, apiVersion } = getOpenAIAuthFromRequest(req);
+
+ const result = checkAuth({ accessCode, apiKey });
+
+ if (!result.auth) {
+ return createErrorResponse(result.error as ErrorType);
+ }
+
+ let openai: OpenAI;
+ if (useAzure) {
+ if (!apiVersion) return createErrorResponse(ChatErrorType.BadRequest);
+
+ // `https://test-001.openai.azure.com/openai/deployments/gpt-35-turbo`,
+ const url = `${endpoint}/openai/deployments/${payload.model.replace('.', '')}`;
+
+ openai = createAzureOpenai({
+ apiVersion,
+ endpoint: url,
+ userApiKey: apiKey,
+ });
+ } else {
+ openai = createOpenai(apiKey, endpoint);
+ }
+
+ return createChatCompletion({ openai, payload });
+}
diff --git a/src/pages/api/openai/createAzureOpenai.ts b/src/pages/api/openai/createAzureOpenai.ts
new file mode 100644
index 000000000000..1108a33ba628
--- /dev/null
+++ b/src/pages/api/openai/createAzureOpenai.ts
@@ -0,0 +1,26 @@
+import OpenAI, { ClientOptions } from 'openai';
+
+import { getServerConfig } from '@/config/server';
+
+// 创建 Azure OpenAI 实例
+export const createAzureOpenai = (params: {
+ apiVersion: string;
+ endpoint: string;
+ userApiKey?: string | null;
+}) => {
+ const { AZURE_API_KEY } = getServerConfig();
+
+ const baseURL = params.endpoint;
+ const apiKey = !params.userApiKey ? AZURE_API_KEY : params.userApiKey;
+
+ const config: ClientOptions = {
+ apiKey: apiKey,
+ baseURL,
+ defaultHeaders: { 'api-key': apiKey },
+ defaultQuery: {
+ 'api-version': params.apiVersion,
+ },
+ };
+
+ return new OpenAI(config);
+};
diff --git a/src/pages/api/openai/createOpenai.ts b/src/pages/api/openai/createOpenai.ts
new file mode 100644
index 000000000000..b3ad8a53c68f
--- /dev/null
+++ b/src/pages/api/openai/createOpenai.ts
@@ -0,0 +1,17 @@
+import OpenAI, { ClientOptions } from 'openai';
+
+import { getServerConfig } from '@/config/server';
+
+// 创建 OpenAI 实例
+export const createOpenai = (userApiKey: string | null, endpoint?: string | null) => {
+ const { OPENAI_API_KEY, OPENAI_PROXY_URL } = getServerConfig();
+
+ const baseURL = endpoint ? endpoint : OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined;
+
+ const config: ClientOptions = {
+ apiKey: !userApiKey ? OPENAI_API_KEY : userApiKey,
+ baseURL,
+ };
+
+ return new OpenAI(config);
+};
diff --git a/src/pages/api/openai/models.api.ts b/src/pages/api/openai/models.api.ts
new file mode 100644
index 000000000000..750f13591872
--- /dev/null
+++ b/src/pages/api/openai/models.api.ts
@@ -0,0 +1,17 @@
+import { getOpenAIAuthFromRequest } from '@/const/fetch';
+
+import { createOpenai } from './createOpenai';
+
+export const runtime = 'edge';
+
+export default async function handler(req: Request) {
+ const { apiKey, endpoint } = getOpenAIAuthFromRequest(req);
+
+ const openAI = createOpenai(apiKey, endpoint);
+
+ const res = await openAI.models.list();
+
+ const modelList = res.data.map((i) => i.id);
+
+ return new Response(JSON.stringify(modelList));
+}
diff --git a/src/pages/settings/features/Settings/Common.tsx b/src/pages/settings/features/Settings/Common.tsx
index 3c26412f0d24..0713ce5f0fa2 100644
--- a/src/pages/settings/features/Settings/Common.tsx
+++ b/src/pages/settings/features/Settings/Common.tsx
@@ -3,7 +3,7 @@ import { Form as AntForm, App, Button, Input, Select } from 'antd';
import isEqual from 'fast-deep-equal';
import { changeLanguage } from 'i18next';
import { debounce } from 'lodash-es';
-import { AppWindow, Monitor, Moon, Palette, Sun, Webhook } from 'lucide-react';
+import { AppWindow, Monitor, Moon, Palette, Sun } from 'lucide-react';
import { memo, useCallback, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
@@ -144,30 +144,6 @@ const Common = memo(() => {
[settings],
);
- const openAI: SettingItemGroup = useMemo(
- () => ({
- children: [
- {
- children: