Skip to content

Commit

Permalink
✨ feat: 支持模型设置
Browse files Browse the repository at this point in the history
  • Loading branch information
arvinxx committed Jul 16, 2023
1 parent d95027d commit 170567a
Show file tree
Hide file tree
Showing 8 changed files with 86 additions and 140 deletions.
2 changes: 2 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@
"@lobehub/ui": "^1",
"@vercel/analytics": "^1",
"ahooks": "^3",
"ai": "^2",
"antd": "^5",
"antd-style": "^3",
"brotli-wasm": "^1",
Expand All @@ -78,6 +79,7 @@
"nanoid": "^4",
"next": "13.4.7",
"next-i18next": "^14",
"openai-edge": "^1",
"polished": "^4",
"react": "^18",
"react-dom": "^18",
Expand Down
122 changes: 0 additions & 122 deletions src/pages/api/OpenAIStream.ts

This file was deleted.

36 changes: 26 additions & 10 deletions src/pages/api/openai.api.ts
Original file line number Diff line number Diff line change
@@ -1,15 +1,31 @@
import { OpenAIStream, OpenAIStreamPayload } from './OpenAIStream';
import { OpenAIStream, StreamingTextResponse } from 'ai';
import { Configuration, OpenAIApi } from 'openai-edge';

if (!process.env.OPENAI_API_KEY) {
throw new Error('Missing env var from OpenAI');
}
import { OpenAIStreamPayload } from '@/types/openai';

const isDev = process.env.NODE_ENV === 'development';
const OPENAI_PROXY_URL = process.env.OPENAI_PROXY_URL;

// Create an OpenAI API client (that's edge friendly!)
const config = new Configuration({
apiKey: process.env.OPENAI_API_KEY,
});

const openai = new OpenAIApi(config, isDev && OPENAI_PROXY_URL ? OPENAI_PROXY_URL : undefined);

export const runtime = 'edge';

export const config = {
runtime: 'edge',
};
export default async function handler(req: Request) {
// Extract the `messages` from the body of the request
const { messages, ...params } = (await req.json()) as OpenAIStreamPayload;

export default async function handler(request: Request) {
const payload = (await request.json()) as OpenAIStreamPayload;
console.log(params);
const response = await openai.createChatCompletion({
stream: true,
...params,
messages: messages.map((m) => ({ content: m.content, role: m.role })),
});

return new Response(OpenAIStream(payload));
const stream = OpenAIStream(response);
return new StreamingTextResponse(stream);
}
9 changes: 5 additions & 4 deletions src/pages/chat/[id]/edit/AgentConfig.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { TextArea } from '@lobehub/ui';
import { Button, Collapse, InputNumber, Segmented, Slider } from 'antd';
import { Collapse, InputNumber, Segmented, Slider } from 'antd';
import isEqual from 'fast-deep-equal';
import { useTranslation } from 'next-i18next';
import { Flexbox } from 'react-layout-kit';
Expand Down Expand Up @@ -45,19 +45,20 @@ const AgentConfig = () => {
value,
}))}
size={'large'}
value={config.model}
/>
</FormItem>
<FormItem label={t('agentPrompt')}>
<Flexbox gap={16}>
<TextArea
onChange={(e) => {
updateAgentConfig({ systemRole: e.target.value });
}}
placeholder={t('agentPromptPlaceholder')}
style={{ minHeight: 160 }}
type={'block'}
value={config.systemRole}
/>
<Flexbox direction={'horizontal-reverse'}>
<Button type={'primary'}>{t('updatePrompt')}</Button>
</Flexbox>
</Flexbox>
</FormItem>
<Collapse
Expand Down
2 changes: 1 addition & 1 deletion src/prompts/agent.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { OpenAIStreamPayload } from '@/pages/api/OpenAIStream';
import { OpenAIStreamPayload } from '@/types/openai';

// 自动起名
export const promptSummaryAgentName = (content: string): Partial<OpenAIStreamPayload> => ({
Expand Down
3 changes: 1 addition & 2 deletions src/prompts/chat.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import { OpenAIStreamPayload } from '@/pages/api/OpenAIStream';
import { OpenAIChatMessage } from '@/types/openai';
import { OpenAIChatMessage, OpenAIStreamPayload } from '@/types/openai';

export const promptSummaryTitle = (
messages: OpenAIChatMessage[],
Expand Down
5 changes: 4 additions & 1 deletion src/services/chatModel.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { merge } from 'lodash-es';

import type { OpenAIStreamPayload } from '@/pages/api/OpenAIStream';
import type { OpenAIStreamPayload } from '@/types/openai';

import { URLS } from './url';

Expand All @@ -13,9 +13,12 @@ export const fetchChatModel = (
) => {
const payload = merge(
{
frequency_penalty: 0,
model: 'gpt-3.5-turbo',
presence_penalty: 0,
stream: true,
temperature: 0.6,
top_p: 1,
},
params,
);
Expand Down
47 changes: 47 additions & 0 deletions src/types/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,50 @@ export interface OpenAIChatMessage {
*/
role: LLMRoleType;
}

/**
 * Request payload for the OpenAI chat-completion call: `fetchChatModel`
 * builds one of these and the `/api/openai` edge handler spreads its
 * fields into `openai.createChatCompletion`.
 */
export interface OpenAIStreamPayload {
  /**
   * Penalty applied to tokens in proportion to how often they have
   * already appeared, discouraging verbatim repetition.
   * @default 0
   */
  frequency_penalty?: number;
  /**
   * Maximum number of tokens to generate in the completion.
   */
  max_tokens?: number;
  /**
   * Ordered list of chat messages forming the conversation context.
   */
  messages: OpenAIChatMessage[];
  /**
   * Model identifier (e.g. 'gpt-3.5-turbo', the default merged in
   * `fetchChatModel`).
   */
  model: string;
  /**
   * Number of completion choices to generate for the request.
   */
  n?: number;
  /**
   * Penalty applied to tokens that have appeared at all, nudging the
   * model toward new topics rather than restating earlier ones.
   * @default 0
   */
  presence_penalty?: number;
  /**
   * Whether to request a streamed (SSE) response.
   * @default true
   */
  stream?: boolean;
  /**
   * Sampling temperature: higher values make output more random and
   * creative, lower values more deterministic.
   * @default 0.5
   * NOTE(review): `fetchChatModel` merges a default of 0.6 — confirm
   * which default is intended and align the two.
   */
  temperature: number;
  /**
   * Nucleus-sampling cutoff: restricts sampling to the smallest set of
   * tokens whose cumulative probability reaches this value.
   * @default 1
   */
  top_p?: number;
}

0 comments on commit 170567a

Please sign in to comment.