♻️ refactor: refactor openAI to openai and azure
arvinxx committed Apr 10, 2024
1 parent 89adf9d commit 2190a95
Showing 38 changed files with 1,065 additions and 452 deletions.
8 changes: 3 additions & 5 deletions .env.example
@@ -1,13 +1,9 @@
# add an access code to lock your lobe-chat application; use a long password to avoid leaking it. If this value contains a comma, it is treated as a password array.
#ACCESS_CODE=lobe66

# add your custom model names, separated by commas if there are multiple, e.g. gpt-3.5-1106,gpt-4-1106
# CUSTOM_MODELS=model1,model2,model3

# Specify your API key selection method; `random` and `turn` are currently supported.
# API_KEY_SELECT_MODE=random

# ---- choose only one of OpenAI Service and Azure OpenAI Service ---- #

########################################
############ OpenAI Service ############
@@ -19,13 +15,15 @@ OPENAI_API_KEY=sk-xxxxxxxxx
# use a proxy to connect to the OpenAI API
# OPENAI_PROXY_URL=https://api.openai.com/v1

# add your custom model names, separated by commas if there are multiple, e.g. gpt-3.5-1106,gpt-4-1106
#OPENAI_ENABLED_MODELS=gpt-3.5-turbo

########################################
######### Azure OpenAI Service #########
########################################
# you can learn about Azure OpenAI Service at https://learn.microsoft.com/en-us/azure/ai-services/openai/overview

# use Azure OpenAI Service by uncommenting the following line
# USE_AZURE_OPENAI=1

# The API key you applied for on the Azure OpenAI account page, which can be found in the "Keys and Endpoints" section.
# AZURE_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
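
The comments above describe comma-separated model lists (`CUSTOM_MODELS` previously, `OPENAI_ENABLED_MODELS` now). As a rough illustration only — the helper below is hypothetical and not part of this commit — such a value could be parsed like this:

```ts
// Hypothetical helper, not part of this commit: turn a comma-separated env
// value such as OPENAI_ENABLED_MODELS=gpt-3.5-turbo,gpt-4-1106 into an array.
const parseModelList = (value: string | undefined): string[] =>
  (value ?? '')
    .split(',')
    .map((model) => model.trim())
    .filter(Boolean);

// Example usage: yields ['gpt-3.5-turbo', 'gpt-4-1106']
const enabledModels = parseModelList(process.env.OPENAI_ENABLED_MODELS);
```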
47 changes: 13 additions & 34 deletions src/app/settings/llm/Azure/index.tsx
@@ -8,7 +8,6 @@ import { Flexbox } from 'react-layout-kit';

import { ModelProvider } from '@/libs/agent-runtime';

import Checker from '../components/Checker';
import ProviderConfig from '../components/ProviderConfig';
import { LLMProviderApiTokenKey, LLMProviderBaseUrlKey, LLMProviderConfigKey } from '../const';

@@ -24,7 +23,7 @@ const useStyles = createStyles(({ css, token }) => ({
`,
}));

const providerKey = 'azure';
const providerKey = ModelProvider.Azure;

const AzureOpenAIProvider = memo(() => {
const { t } = useTranslation('setting');
@@ -33,7 +32,7 @@ const AzureOpenAIProvider = memo(() => {

return (
<ProviderConfig
configItems={[
apiKeyItems={[
{
children: (
<Input.Password
@@ -55,15 +54,13 @@
children: (
<AutoComplete
options={[
'2023-12-01-preview',
'2023-08-01-preview',
'2023-07-01-preview',
'2024-02-01',
'2024-03-01-preview',
'2024-02-15-preview',
'2023-10-01-preview',
'2023-06-01-preview',
'2023-03-15-preview',
].map((i) => ({
label: i,
value: i,
}))}
'2023-05-15',
].map((i) => ({ label: i, value: i }))}
placeholder={'20XX-XX-XX'}
/>
),
@@ -75,30 +72,12 @@
label: t('llm.azure.azureApiVersion.title'),
name: [LLMProviderConfigKey, providerKey, 'apiVersion'],
},
{
children: (
<Input.TextArea
allowClear
placeholder={'gpt-35-16k,my-gpt=gpt-35-turbo'}
style={{ height: 100 }}
/>
),
desc: (
<Markdown className={styles.markdown} fontSize={12} variant={'chat'}>
{t('llm.azure.deployments.desc')}
</Markdown>
),

label: t('llm.azure.deployments.title'),
name: [LLMProviderConfigKey, providerKey, 'deployments'],
},
{
children: <Checker model={'gpt-3.5-turbo'} provider={ModelProvider.Azure} />,
desc: t('llm.checker.desc'),
label: t('llm.checker.title'),
minWidth: undefined,
},
]}
checkModel={'gpt-3.5-turbo'}
modelList={{
azureDeployName: true,
placeholder: t('llm.azure.modelListPlaceholder'),
}}
provider={providerKey}
title={
<Flexbox align={'center'} gap={8} horizontal>
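
For reference, the props used in the new Azure config above (`apiKeyItems`, `checkModel`, `modelList`) suggest a slimmer `ProviderConfig` contract than the old `configItems` array. The sketch below is inferred from this diff alone, not copied from the actual component, and the field names inside `ApiKeyFormItem` are assumptions:

```ts
import type { ReactNode } from 'react';

// Inferred from the usage in this diff — not the real definition in
// src/app/settings/llm/components/ProviderConfig.
interface ApiKeyFormItem {
  children: ReactNode;
  desc?: ReactNode;
  label?: ReactNode;
  minWidth?: string | number;
  name?: string | (string | number)[];
}

interface ProviderConfigProps {
  apiKeyItems?: ApiKeyFormItem[]; // provider-specific credential fields (key, endpoint, API version, …)
  canDeactivate?: boolean; // false keeps the provider card always enabled (OpenAI)
  checkModel?: string; // model used by the built-in connectivity checker
  modelList?: {
    azureDeployName?: boolean; // Azure models also carry a deployment name
    placeholder?: string;
  };
  provider: string; // e.g. ModelProvider.Azure, 'openai', 'togetherai'
  showEndpoint?: boolean; // render an endpoint / proxy URL field
  title: ReactNode;
}
```

Under this reading, the deployments textarea and the manual checker row removed above become responsibilities of the shared component.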
14 changes: 3 additions & 11 deletions src/app/settings/llm/Bedrock/index.tsx
@@ -7,7 +7,6 @@ import { Flexbox } from 'react-layout-kit';
import { ModelProvider } from '@/libs/agent-runtime';
import { GlobalLLMProviderKey } from '@/types/settings';

import Checker from '../components/Checker';
import ProviderConfig from '../components/ProviderConfig';
import { LLMProviderConfigKey } from '../const';

@@ -18,7 +17,7 @@ const BedrockProvider = memo(() => {

return (
<ProviderConfig
configItems={[
apiKeyItems={[
{
children: (
<Input.Password
@@ -56,16 +55,9 @@
label: t(`llm.${providerKey}.region.title`),
name: [LLMProviderConfigKey, providerKey, 'region'],
},
{
children: (
<Checker model={'anthropic.claude-instant-v1'} provider={ModelProvider.Bedrock} />
),
desc: t(`llm.${providerKey}.checker.desc`),
label: t('llm.checker.title'),
minWidth: '100%',
},
]}
provider={providerKey}
checkModel={'anthropic.claude-instant-v1'}
provider={ModelProvider.Bedrock}
title={
<Flexbox align={'center'} gap={8} horizontal>
<Aws.Color size={32} />
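
The manual `Checker` rows deleted from both the Azure and Bedrock configs are presumably rendered by `ProviderConfig` itself from `checkModel`. A minimal sketch of that idea, assuming hypothetical item and checker shapes rather than the project's real internals:

```ts
import type { ReactNode } from 'react';

// Hypothetical sketch only: a shared component could append the connectivity
// check row itself, so per-provider files no longer import Checker.
interface FormItem {
  children: ReactNode;
  desc?: ReactNode;
  label?: ReactNode;
  minWidth?: string | number;
}

const buildItems = (
  apiKeyItems: FormItem[],
  checkModel: string | undefined,
  renderChecker: (model: string) => ReactNode,
  t: (key: string) => string,
): FormItem[] =>
  checkModel
    ? [
        ...apiKeyItems,
        {
          children: renderChecker(checkModel),
          desc: t('llm.checker.desc'),
          label: t('llm.checker.title'),
          minWidth: '100%',
        },
      ]
    : apiKeyItems;
```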
130 changes: 9 additions & 121 deletions src/app/settings/llm/OpenAI/index.tsx
@@ -1,127 +1,15 @@
import { OpenAI } from '@lobehub/icons';
import { Markdown } from '@lobehub/ui';
import { AutoComplete, Input, Switch } from 'antd';
import { createStyles } from 'antd-style';
import { memo } from 'react';
import { useTranslation } from 'react-i18next';

import { ModelProvider } from '@/libs/agent-runtime';
import { useGlobalStore } from '@/store/global';
import { modelConfigSelectors } from '@/store/global/selectors';

import Checker from '../components/Checker';
import ProviderConfig from '../components/ProviderConfig';
import ProviderModelListSelect from '../components/ProviderModelList';
import { LLMProviderConfigKey } from '../const';

const useStyles = createStyles(({ css, token }) => ({
markdown: css`
p {
color: ${token.colorTextDescription} !important;
}
`,
tip: css`
font-size: 12px;
color: ${token.colorTextDescription};
`,
}));
const providerKey = 'openAI';

const LLM = memo(() => {
const { t } = useTranslation('setting');
const { styles } = useStyles();

const [useAzure] = useGlobalStore((s) => [modelConfigSelectors.enableAzure(s)]);

return (
<ProviderConfig
canDeactivate={false}
configItems={[
{
children: (
<Input.Password
autoComplete={'new-password'}
placeholder={
useAzure ? t('llm.azure.token.placeholder') : t('llm.openai.token.placeholder')
}
/>
),
desc: useAzure ? t('llm.azure.token.desc') : t('llm.openai.token.desc'),
label: useAzure ? t('llm.azure.token.title') : t('llm.openai.token.title'),
name: [LLMProviderConfigKey, providerKey, 'OPENAI_API_KEY'],
},
{
children: (
<Input
allowClear
placeholder={
useAzure
? t('llm.azure.endpoint.placeholder')
: t('llm.openai.endpoint.placeholder')
}
/>
),
desc: useAzure ? t('llm.azure.endpoint.desc') : t('llm.openai.endpoint.desc'),
label: useAzure ? t('llm.azure.endpoint.title') : t('llm.openai.endpoint.title'),
name: [LLMProviderConfigKey, providerKey, 'endpoint'],
},
{
children: (
<ProviderModelListSelect
placeholder={t('llm.openai.customModelName.placeholder')}
provider={'openAI'}
/>
),
desc: t('llm.openai.customModelName.desc'),
label: t('llm.openai.customModelName.title'),
name: [LLMProviderConfigKey, providerKey, 'enabledModels'],
},
{
children: <Switch />,
desc: t('llm.openai.useAzure.desc'),
label: t('llm.openai.useAzure.title'),
minWidth: undefined,
name: [LLMProviderConfigKey, 'openAI', 'useAzure'],
valuePropName: 'checked',
},
{
children: (
<AutoComplete
options={[
'2024-02-15-preview',
'2023-12-01-preview',
'2023-08-01-preview',
'2023-07-01-preview',
'2023-06-01-preview',
'2023-05-15',
'2023-03-15-preview',
].map((i) => ({
label: i,
value: i,
}))}
placeholder={'20XX-XX-XX'}
/>
),
desc: (
<Markdown className={styles.markdown} fontSize={12} variant={'chat'}>
{t('llm.openai.azureApiVersion.desc')}
</Markdown>
),
hidden: !useAzure,
label: t('llm.openai.azureApiVersion.title'),
name: [LLMProviderConfigKey, providerKey, 'azureApiVersion'],
},
{
children: <Checker model={'gpt-3.5-turbo'} provider={ModelProvider.OpenAI} />,
desc: t('llm.checker.desc'),
label: t('llm.checker.title'),
minWidth: '100%',
},
]}
provider={providerKey}
title={<OpenAI.Combine size={24} />}
/>
);
});
const OpenAIProvider = memo(() => (
<ProviderConfig
canDeactivate={false}
provider={'openai'}
showEndpoint
title={<OpenAI.Combine size={24} />}
/>
));

export default LLM;
export default OpenAIProvider;
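
With the `useAzure` switch gone, the OpenAI card shrinks to the defaults above and Azure gets its own card. A hypothetical composition of the two — the settings page that renders these cards is not part of this diff, and the import paths assume both files keep their default exports:

```tsx
// Hypothetical composition, not part of this commit: Azure credentials now live
// in their own provider card instead of behind a `useAzure` switch.
import AzureOpenAIProvider from './Azure';
import OpenAIProvider from './OpenAI';

const ProviderList = () => (
  <>
    <OpenAIProvider />
    <AzureOpenAIProvider />
  </>
);

export default ProviderList;
```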
1 change: 0 additions & 1 deletion src/app/settings/llm/OpenRouter/index.tsx
@@ -13,7 +13,6 @@ const OpenRouterProvider = memo(() => {
<ProviderConfig
checkModel={'mistralai/mistral-7b-instruct:free'}
provider={ModelProvider.OpenRouter}
showCustomModelName
title={
<OpenRouter.Combine
color={theme.isDarkMode ? theme.colorText : OpenRouter.colorPrimary}
1 change: 0 additions & 1 deletion src/app/settings/llm/TogetherAI/index.tsx
@@ -11,7 +11,6 @@ const TogetherAIProvider = memo(() => {
<ProviderConfig
checkModel={'togethercomputer/alpaca-7b'}
provider={'togetherai'}
showCustomModelName
title={
<Together.Combine
color={theme.isDarkMode ? theme.colorText : Together.colorPrimary}