Skip to content

Commit

Permalink
♻️ refactor: migrate openai-edge to openai (lobehub#145)
Browse files Browse the repository at this point in the history
* ♻️ refactor: 重构 openai-edge 到 openai

* 🔊 chore: add log

* 🐛 fix: fix openai baseURL error

* ✏️ ci: fix types
  • Loading branch information
arvinxx authored Sep 6, 2023
1 parent 742872d commit 75ee574
Show file tree
Hide file tree
Showing 9 changed files with 64 additions and 44 deletions.
1 change: 1 addition & 0 deletions .eslintrc.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,6 @@ config.rules['unicorn/prefer-code-point'] = 0;
config.rules['no-extra-boolean-cast'] = 0;
config.rules['unicorn/no-useless-undefined'] = 0;
config.rules['react/no-unknown-property'] = 0;
config.rules['unicorn/prefer-ternary'] = 0;

module.exports = config;
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@
"@emoji-mart/data": "^1",
"@emoji-mart/react": "^1",
"@icons-pack/react-simple-icons": "^9",
"@lobehub/chat-plugin-sdk": "^1.15.1",
"@lobehub/chat-plugin-sdk": "^1.17.0",
"@lobehub/chat-plugins-gateway": "^1.5.0",
"@lobehub/ui": "latest",
"@vercel/analytics": "^1",
Expand All @@ -85,7 +85,7 @@
"lucide-react": "latest",
"nanoid": "^4",
"next": "13.4.7",
"openai-edge": "^1",
"openai": "^4.4.0",
"polished": "^4",
"react": "^18",
"react-dom": "^18",
Expand Down
69 changes: 33 additions & 36 deletions src/pages/api/openai.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { OpenAIStream, OpenAIStreamCallbacks, StreamingTextResponse } from 'ai';
import { Configuration, OpenAIApi } from 'openai-edge';
import { OpenAIStream, StreamingTextResponse } from 'ai';
import OpenAI, { ClientOptions } from 'openai';

import { getServerConfig } from '@/config/server';
import { createErrorResponse } from '@/pages/api/error';
Expand All @@ -10,25 +10,29 @@ import { OpenAIStreamPayload } from '@/types/openai';
export const createOpenAI = (userApiKey: string | null, endpoint?: string | null) => {
const { OPENAI_API_KEY, OPENAI_PROXY_URL } = getServerConfig();

const config = new Configuration({
const baseURL = endpoint ? endpoint : OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined;

const config: ClientOptions = {
apiKey: !userApiKey ? OPENAI_API_KEY : userApiKey,
});
};

const basePath = endpoint ? endpoint : OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined;
// a bug with openai: https://github.com/openai/openai-node/issues/283
// TODO: should refactor when openai fix the bug
if (baseURL) {
config.baseURL = baseURL;
}

return new OpenAIApi(config, basePath);
return new OpenAI(config);
};

interface CreateChatCompletionOptions {
OPENAI_API_KEY: string | null;
callbacks?: (payload: OpenAIStreamPayload) => OpenAIStreamCallbacks;
endpoint?: string | null;
payload: OpenAIStreamPayload;
}

export const createChatCompletion = async ({
payload,
callbacks,
OPENAI_API_KEY,
endpoint,
}: CreateChatCompletionOptions) => {
Expand All @@ -47,37 +51,30 @@ export const createChatCompletion = async ({

// ============ 2. 发送请求 ============ //

const requestParams = { messages: formatMessages, stream: true, ...params };

let response: Response;

try {
response = await openai.createChatCompletion(requestParams);
const response = await openai.chat.completions.create({
messages: formatMessages,
...params,
stream: true,
});
const stream = OpenAIStream(response);
return new StreamingTextResponse(stream);
} catch (error) {
// 如果 await 超时报错,说明是 OpenAI 服务端的问题
return createErrorResponse(ChatErrorType.GatewayTimeout, { message: error });
}

// ============ 4. 处理异常响应 ============ //
if (!response.ok) {
let error;

try {
// 正常情况下应该是 OpenAI 的 JSON
error = await response.clone().json();
} catch {
// 如果不是 JSON,那么可能是其他接口端的响应,读 text 为结果
const result = await response.text();

error = { message: result };
// Check if the error is an OpenAI APIError
if (error instanceof OpenAI.APIError) {
return createErrorResponse(ChatErrorType.OpenAIBizError, {
endpoint: !!endpoint ? endpoint : undefined,
error: error.error ?? error.cause,
});
}

return createErrorResponse(ChatErrorType.OpenAIBizError, { ...error, endpoint });
}

// ============ 5. 发送正常相应 ============ //

const stream = OpenAIStream(response, callbacks?.(requestParams));
// track the error that not an OpenAI APIError
console.error(error);

return new StreamingTextResponse(stream);
// return as an InternalServerError
return createErrorResponse(ChatErrorType.InternalServerError, {
endpoint,
error: JSON.stringify(error),
});
}
};
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { PluginRenderProps } from '@lobehub/chat-plugin-sdk';
import { PluginRenderProps } from '@lobehub/chat-plugin-sdk/client';
import { Skeleton } from 'antd';
import { memo, useEffect, useRef, useState } from 'react';

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { PluginChannel } from '@lobehub/chat-plugin-sdk';
import { PluginChannel } from '@lobehub/chat-plugin-sdk/client';

export const onPluginReady = (e: MessageEvent, onReady: () => void) => {
if (e.data.type === PluginChannel.pluginReadyForRender) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { PluginRender, PluginRenderProps } from '@lobehub/chat-plugin-sdk';
import { PluginRender, PluginRenderProps } from '@lobehub/chat-plugin-sdk/client';
import { Skeleton } from 'antd';
import { memo, useEffect, useState } from 'react';

Expand Down
3 changes: 1 addition & 2 deletions src/services/chatModel.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
import { merge } from 'lodash-es';
import { ChatCompletionFunctions } from 'openai-edge/types/api';

import { LOBE_CHAT_ACCESS_CODE, OPENAI_API_KEY_HEADER_KEY, OPENAI_END_POINT } from '@/const/fetch';
import { useGlobalStore } from '@/store/global';
import { pluginSelectors, usePluginStore } from '@/store/plugin';
import { initialLobeAgentConfig } from '@/store/session/initialState';
import type { OpenAIStreamPayload } from '@/types/openai';
import type { ChatCompletionFunctions, OpenAIStreamPayload } from '@/types/openai';

import { URLS } from './url';

Expand Down
2 changes: 1 addition & 1 deletion src/store/plugin/selectors.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import { uniqBy } from 'lodash-es';
import { ChatCompletionFunctions } from 'openai-edge/types/api';

import { PLUGIN_SCHEMA_SEPARATOR } from '@/const/plugin';
import { pluginHelpers } from '@/store/plugin/helpers';
import { ChatCompletionFunctions } from '@/types/openai';

import { PluginStoreState } from './initialState';

Expand Down
23 changes: 23 additions & 0 deletions src/types/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,3 +68,26 @@ export interface OpenAIStreamPayload {
*/
top_p?: number;
}

/**
 * Schema describing a function the model may be asked to call
 * (OpenAI function-calling format).
 */
export interface ChatCompletionFunctions {
  /**
   * The description of what the function does.
   */
  description?: string;
  /**
   * The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
   */
  name: string;
  /**
   * The parameters the functions accepts, described as a JSON Schema object. See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format.
   */
  parameters?: {
    [key: string]: any;
  };
}

0 comments on commit 75ee574

Please sign in to comment.