Skip to content

Commit

Permalink
✨ feat: add DEFAULT_MODEL environment variable (#280)
Browse files — browse the repository at this point in the history
* ✨ feat: add DEFAULT_MODEL environment variable

* set the model maxLength setting in the models definition

* set the model tokenLimit setting in the models definition
  • Loading branch information
thomasleveil authored Mar 29, 2023
1 parent 3f82710 commit 00c6c72
Show file tree
Hide file tree
Showing 9 changed files with 94 additions and 38 deletions.
3 changes: 2 additions & 1 deletion .env.local.example
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
OPENAI_API_KEY=YOUR_KEY
OPENAI_API_KEY=YOUR_KEY
DEFAULT_MODEL=gpt-3.5-turbo
12 changes: 12 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,18 @@ npm run dev

You should be able to start chatting.

## Configuration

When deploying the application, the following environment variables can be set:

| Environment Variable | Default value | Description |
|----------------------|------------------|---------------------------------------------------------|
| OPENAI_API_KEY | | The default API key used for authentication with OpenAI |
| DEFAULT_MODEL | `gpt-3.5-turbo` | The default model to use on new conversations |

If you do not provide an OpenAI API key with `OPENAI_API_KEY`, users will have to provide their own key.
If you don't have an OpenAI API key, you can get one [here](https://platform.openai.com/account/api-keys).

## Contact

If you have any questions, feel free to reach out to me on [Twitter](https://twitter.com/mckaywrigley).
15 changes: 10 additions & 5 deletions components/Chat/Chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { Conversation, Message } from '@/types/chat';
import { IconArrowDown } from '@tabler/icons-react';
import { KeyValuePair } from '@/types/data';
import { ErrorMessage } from '@/types/error';
import { OpenAIModel } from '@/types/openai';
import { OpenAIModel, OpenAIModelID } from '@/types/openai';
import { Prompt } from '@/types/prompt';
import { throttle } from '@/utils';
import { IconClearAll, IconKey, IconSettings } from '@tabler/icons-react';
Expand All @@ -29,6 +29,7 @@ interface Props {
models: OpenAIModel[];
apiKey: string;
serverSideApiKeyIsSet: boolean;
defaultModelId: OpenAIModelID;
messageIsStreaming: boolean;
modelError: ErrorMessage | null;
loading: boolean;
Expand All @@ -48,6 +49,7 @@ export const Chat: FC<Props> = memo(
models,
apiKey,
serverSideApiKeyIsSet,
defaultModelId,
messageIsStreaming,
modelError,
loading,
Expand Down Expand Up @@ -206,6 +208,7 @@ export const Chat: FC<Props> = memo(
<ModelSelect
model={conversation.model}
models={models}
defaultModelId={defaultModelId}
onModelChange={(model) =>
onUpdateConversation(conversation, {
key: 'model',
Expand Down Expand Up @@ -236,12 +239,13 @@ export const Chat: FC<Props> = memo(
className="ml-2 cursor-pointer hover:opacity-50"
onClick={handleSettings}
>
<IconSettings size={18} />
<IconSettings size={18} />
</button>
<button
className="ml-2 cursor-pointer hover:opacity-50"
onClick={onClearAll}>
<IconClearAll size={18} />
onClick={onClearAll}
>
<IconClearAll size={18} />
</button>
</div>
{showSettings && (
Expand All @@ -250,6 +254,7 @@ export const Chat: FC<Props> = memo(
<ModelSelect
model={conversation.model}
models={models}
defaultModelId={defaultModelId}
onModelChange={(model) =>
onUpdateConversation(conversation, {
key: 'model',
Expand Down Expand Up @@ -306,7 +311,7 @@ export const Chat: FC<Props> = memo(
className="flex h-7 w-7 items-center justify-center rounded-full bg-white shadow-md hover:shadow-lg focus:outline-none focus:ring-2 focus:ring-blue-500 dark:bg-[#515152d7]"
onClick={handleScrollDown}
>
<IconArrowDown size={18}/>
<IconArrowDown size={18} />
</button>
</div>
)}
Expand Down
27 changes: 16 additions & 11 deletions components/Chat/ChatInput.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { Message } from '@/types/chat';
import { OpenAIModel, OpenAIModelID } from '@/types/openai';
import { OpenAIModel } from '@/types/openai';
import { Prompt } from '@/types/prompt';
import { IconPlayerStop, IconRepeat, IconSend } from '@tabler/icons-react';
import { useTranslation } from 'next-i18next';
Expand Down Expand Up @@ -56,7 +56,7 @@ export const ChatInput: FC<Props> = ({

const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
const value = e.target.value;
const maxLength = model.id === OpenAIModelID.GPT_3_5 ? 12000 : 24000;
const maxLength = model.maxLength;

if (value.length > maxLength) {
alert(
Expand Down Expand Up @@ -109,7 +109,10 @@ export const ChatInput: FC<Props> = ({
const selectedPrompt = filteredPrompts[activePromptIndex];
if (selectedPrompt) {
setContent((prevContent) => {
const newContent = prevContent?.replace(/\/\w*$/, selectedPrompt.content);
const newContent = prevContent?.replace(
/\/\w*$/,
selectedPrompt.content,
);
return newContent;
});
handlePromptSelect(selectedPrompt);
Expand Down Expand Up @@ -211,8 +214,9 @@ export const ChatInput: FC<Props> = ({
if (textareaRef && textareaRef.current) {
textareaRef.current.style.height = 'inherit';
textareaRef.current.style.height = `${textareaRef.current?.scrollHeight}px`;
textareaRef.current.style.overflow = `${textareaRef?.current?.scrollHeight > 400 ? 'auto' : 'hidden'
}`;
textareaRef.current.style.overflow = `${
textareaRef?.current?.scrollHeight > 400 ? 'auto' : 'hidden'
}`;
}
}, [content]);

Expand Down Expand Up @@ -257,15 +261,16 @@ export const ChatInput: FC<Props> = ({
<div className="relative mx-2 flex w-full flex-grow flex-col rounded-md border border-black/10 bg-white shadow-[0_0_10px_rgba(0,0,0,0.10)] dark:border-gray-900/50 dark:bg-[#40414F] dark:text-white dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] sm:mx-4">
<textarea
ref={textareaRef}
className="m-0 w-full resize-none border-0 bg-transparent p-0 pr-8 pl-2 text-black dark:bg-transparent dark:text-white py-2 md:py-3 md:pl-4"
className="m-0 w-full resize-none border-0 bg-transparent p-0 py-2 pr-8 pl-2 text-black dark:bg-transparent dark:text-white md:py-3 md:pl-4"
style={{
resize: 'none',
bottom: `${textareaRef?.current?.scrollHeight}px`,
maxHeight: '400px',
overflow: `${textareaRef.current && textareaRef.current.scrollHeight > 400
? 'auto'
: 'hidden'
}`,
overflow: `${
textareaRef.current && textareaRef.current.scrollHeight > 400
? 'auto'
: 'hidden'
}`,
}}
placeholder={
t('Type a message or type "/" to select a prompt...') || ''
Expand All @@ -278,7 +283,7 @@ export const ChatInput: FC<Props> = ({
onKeyDown={handleKeyDown}
/>
<button
className="absolute right-2 top-2 rounded-sm p-1 text-neutral-800 hover:bg-neutral-200 hover:text-neutral-900 dark:bg-opacity-50 dark:text-neutral-100 dark:hover:text-neutral-200 opacity-60"
className="absolute right-2 top-2 rounded-sm p-1 text-neutral-800 opacity-60 hover:bg-neutral-200 hover:text-neutral-900 dark:bg-opacity-50 dark:text-neutral-100 dark:hover:text-neutral-200"
onClick={handleSend}
>
<IconSend size={18} />
Expand Down
17 changes: 13 additions & 4 deletions components/Chat/ModelSelect.tsx
Original file line number Diff line number Diff line change
@@ -1,15 +1,22 @@
import { OpenAIModel } from '@/types/openai';
import { OpenAIModel, OpenAIModelID } from '@/types/openai';
import { useTranslation } from 'next-i18next';
import { FC } from 'react';

interface Props {
model: OpenAIModel;
models: OpenAIModel[];
defaultModelId: OpenAIModelID;
onModelChange: (model: OpenAIModel) => void;
}

export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
export const ModelSelect: FC<Props> = ({
model,
models,
defaultModelId,
onModelChange,
}) => {
const { t } = useTranslation('chat');

return (
<div className="flex flex-col">
<label className="mb-2 text-left text-neutral-700 dark:text-neutral-400">
Expand All @@ -19,7 +26,7 @@ export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
<select
className="w-full bg-transparent p-2"
placeholder={t('Select a model') || ''}
value={model.id}
value={model?.id || defaultModelId}
onChange={(e) => {
onModelChange(
models.find(
Expand All @@ -34,7 +41,9 @@ export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
value={model.id}
className="dark:bg-[#343541] dark:text-white"
>
{model.name}
{model.id === defaultModelId
? `Default (${model.name})`
: model.name}
</option>
))}
</select>
Expand Down
3 changes: 1 addition & 2 deletions components/Chat/SystemPrompt.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,7 @@ export const SystemPrompt: FC<Props> = ({

const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
const value = e.target.value;
const maxLength =
conversation.model.id === OpenAIModelID.GPT_3_5 ? 12000 : 24000;
const maxLength = conversation.model.maxLength;

if (value.length > maxLength) {
alert(
Expand Down
5 changes: 1 addition & 4 deletions pages/api/chat.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import { ChatBody, Message } from '@/types/chat';
import { OpenAIModelID } from '@/types/openai';
import { DEFAULT_SYSTEM_PROMPT } from '@/utils/app/const';
import { OpenAIStream } from '@/utils/server';
import tiktokenModel from '@dqbd/tiktoken/encoders/cl100k_base.json';
Expand All @@ -22,8 +21,6 @@ const handler = async (req: Request): Promise<Response> => {
tiktokenModel.pat_str,
);

const tokenLimit = model.id === OpenAIModelID.GPT_4 ? 6000 : 3000;

let promptToSend = prompt;
if (!promptToSend) {
promptToSend = DEFAULT_SYSTEM_PROMPT;
Expand All @@ -38,7 +35,7 @@ const handler = async (req: Request): Promise<Response> => {
const message = messages[i];
const tokens = encoding.encode(message.content);

if (tokenCount + tokens.length > tokenLimit) {
if (tokenCount + tokens.length > model.tokenLimit) {
break;
}
tokenCount += tokens.length;
Expand Down
39 changes: 29 additions & 10 deletions pages/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,12 @@ import { KeyValuePair } from '@/types/data';
import { ErrorMessage } from '@/types/error';
import { LatestExportFormat, SupportedExportFormats } from '@/types/export';
import { Folder, FolderType } from '@/types/folder';
import { OpenAIModel, OpenAIModelID, OpenAIModels } from '@/types/openai';
import {
fallbackModelID,
OpenAIModel,
OpenAIModelID,
OpenAIModels,
} from '@/types/openai';
import { Prompt } from '@/types/prompt';
import {
cleanConversationHistory,
Expand All @@ -32,9 +37,13 @@ import { v4 as uuidv4 } from 'uuid';

interface HomeProps {
serverSideApiKeyIsSet: boolean;
defaultModelId: OpenAIModelID;
}

const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
const Home: React.FC<HomeProps> = ({
serverSideApiKeyIsSet,
defaultModelId,
}) => {
const { t } = useTranslation('chat');

// STATE ----------------------------------------------
Expand Down Expand Up @@ -371,7 +380,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
id: uuidv4(),
name: `${t('New Conversation')}`,
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5],
model: lastConversation?.model || defaultModelId,
prompt: DEFAULT_SYSTEM_PROMPT,
folderId: null,
};
Expand Down Expand Up @@ -404,7 +413,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
id: uuidv4(),
name: 'New conversation',
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5],
model: OpenAIModels[defaultModelId],
prompt: DEFAULT_SYSTEM_PROMPT,
folderId: null,
});
Expand Down Expand Up @@ -438,7 +447,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
id: uuidv4(),
name: 'New conversation',
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5],
model: OpenAIModels[defaultModelId],
prompt: DEFAULT_SYSTEM_PROMPT,
folderId: null,
});
Expand Down Expand Up @@ -486,7 +495,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
name: `Prompt ${prompts.length + 1}`,
description: '',
content: '',
model: OpenAIModels[OpenAIModelID.GPT_3_5],
model: OpenAIModels[defaultModelId],
folderId: null,
};

Expand Down Expand Up @@ -601,7 +610,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
id: uuidv4(),
name: 'New conversation',
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5],
model: OpenAIModels[defaultModelId],
prompt: DEFAULT_SYSTEM_PROMPT,
folderId: null,
});
Expand Down Expand Up @@ -663,7 +672,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
</button>
<div
onClick={handleToggleChatbar}
className="absolute top-0 left-0 z-10 w-full h-full bg-black opacity-70 sm:hidden"
className="absolute top-0 left-0 z-10 h-full w-full bg-black opacity-70 sm:hidden"
></div>
</div>
) : (
Expand All @@ -681,6 +690,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
messageIsStreaming={messageIsStreaming}
apiKey={apiKey}
serverSideApiKeyIsSet={serverSideApiKeyIsSet}
defaultModelId={defaultModelId}
modelError={modelError}
models={models}
loading={loading}
Expand Down Expand Up @@ -713,7 +723,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
</button>
<div
onClick={handleTogglePromptbar}
className="absolute top-0 left-0 z-10 w-full h-full bg-black opacity-70 sm:hidden"
className="absolute top-0 left-0 z-10 h-full w-full bg-black opacity-70 sm:hidden"
></div>
</div>
) : (
Expand All @@ -733,15 +743,24 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
export default Home;

export const getServerSideProps: GetServerSideProps = async ({ locale }) => {
const defaultModelId =
(process.env.DEFAULT_MODEL &&
Object.values(OpenAIModelID).includes(
process.env.DEFAULT_MODEL as OpenAIModelID,
) &&
process.env.DEFAULT_MODEL) ||
fallbackModelID;

return {
props: {
serverSideApiKeyIsSet: !!process.env.OPENAI_API_KEY,
defaultModelId,
...(await serverSideTranslations(locale ?? 'en', [
'common',
'chat',
'sidebar',
'markdown',
'promptbar'
'promptbar',
])),
},
};
Expand Down
11 changes: 10 additions & 1 deletion types/openai.ts
Original file line number Diff line number Diff line change
@@ -1,20 +1,29 @@
// Model metadata shared by the client (input length cap) and the API route
// (token budget for history truncation).
export interface OpenAIModel {
  id: string;
  name: string; // human-readable label shown in the model selector
  maxLength: number; // maximum length of a message, in characters
  tokenLimit: number; // maximum number of history tokens sent with a request
}

export enum OpenAIModelID {
  GPT_3_5 = 'gpt-3.5-turbo',
  GPT_4 = 'gpt-4',
}

// in case the `DEFAULT_MODEL` environment variable is not set or set to an unsupported model
export const fallbackModelID = OpenAIModelID.GPT_3_5;

// Per-model settings, keyed by model ID.
// NOTE(review): maxLength/tokenLimit values are fixed here rather than
// hard-coded at each call site (see ChatInput, SystemPrompt, api/chat).
export const OpenAIModels: Record<OpenAIModelID, OpenAIModel> = {
  [OpenAIModelID.GPT_3_5]: {
    id: OpenAIModelID.GPT_3_5,
    // "Default (…)" decoration is applied dynamically by ModelSelect when
    // this model is the defaultModelId, so the plain name lives here.
    name: 'GPT-3.5',
    maxLength: 12000,
    tokenLimit: 3000,
  },
  [OpenAIModelID.GPT_4]: {
    id: OpenAIModelID.GPT_4,
    name: 'GPT-4',
    maxLength: 24000,
    tokenLimit: 6000,
  },
};

1 comment on commit 00c6c72

@vercel
Copy link

@vercel vercel bot commented on 00c6c72 Mar 29, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.