
Commit

✨ feat: support close openai
arvinxx committed Apr 10, 2024
1 parent c398063 commit 1ff1aef
Showing 5 changed files with 27 additions and 14 deletions.
7 changes: 1 addition & 6 deletions src/app/settings/llm/OpenAI/index.tsx
@@ -4,12 +4,7 @@ import { memo } from 'react';
 import ProviderConfig from '../components/ProviderConfig';
 
 const OpenAIProvider = memo(() => (
-  <ProviderConfig
-    canDeactivate={false}
-    provider={'openai'}
-    showEndpoint
-    title={<OpenAI.Combine size={24} />}
-  />
+  <ProviderConfig provider={'openai'} showEndpoint title={<OpenAI.Combine size={24} />} />
 ));
 
 export default OpenAIProvider;
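
Dropping `canDeactivate={false}` is the change that actually lets users close OpenAI: the card now falls back to whatever default `ProviderConfig` applies. Below is a minimal sketch of the props involved, assuming the component defaults `canDeactivate` to `true`; the prop names come from the removed lines, while the default and the body are illustrative assumptions, not code from this commit.

// Hypothetical, simplified view of the ProviderConfig props touched here.
import type { ReactNode } from 'react';

interface ProviderConfigProps {
  canDeactivate?: boolean; // OpenAI previously pinned this to false
  provider: string;
  showEndpoint?: boolean;
  title: ReactNode;
}

// Assumed default: when the prop is omitted, the provider card shows an
// enable/disable switch, so OpenAI behaves like every other provider.
const describeCard = ({ canDeactivate = true, provider }: ProviderConfigProps) =>
  `${provider} card ${canDeactivate ? 'with' : 'without'} an on/off switch`;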
22 changes: 22 additions & 0 deletions src/config/server/provider.ts
@@ -83,6 +83,10 @@ declare global {
        * @deprecated
        */
       OLLAMA_CUSTOM_MODELS?: string;
+      /**
+       * @deprecated
+       */
+      OPENROUTER_CUSTOM_MODELS?: string;
     }
   }
 }
@@ -119,6 +123,24 @@ export const getProviderConfig = () => {
     regions = process.env.OPENAI_FUNCTION_REGIONS.split(',');
   }
 
+  if (process.env.CUSTOM_MODELS) {
+    console.warn(
+      'DEPRECATED: `CUSTOM_MODELS` is deprecated, please use `OPENAI_MODEL_LIST` instead, we will remove `CUSTOM_MODELS` in the LobeChat 1.0',
+    );
+  }
+
+  if (process.env.OLLAMA_CUSTOM_MODELS) {
+    console.warn(
+      'DEPRECATED: `OLLAMA_CUSTOM_MODELS` is deprecated, please use `OLLAMA_MODEL_LIST` instead, we will remove `OLLAMA_CUSTOM_MODELS` in the LobeChat 1.0',
+    );
+  }
+
+  if (process.env.OPENROUTER_CUSTOM_MODELS) {
+    console.warn(
+      'DEPRECATED: `OPENROUTER_CUSTOM_MODELS` is deprecated, please use `OPENROUTER_MODEL_LIST` instead, we will remove `OPENROUTER_CUSTOM_MODELS` in the LobeChat 1.0',
+    );
+  }
+
   return {
     API_KEY_SELECT_MODE: process.env.API_KEY_SELECT_MODE,
 
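
The three checks are identical apart from the variable names, so the pattern can be captured in one helper. This is only a sketch of the repetition above, not code from the commit; the environment variable pairs are exactly the ones shown.

// Sketch: one helper covering the three deprecation warnings added above.
const warnDeprecatedEnv = (oldKey: string, newKey: string) => {
  if (process.env[oldKey]) {
    console.warn(
      `DEPRECATED: \`${oldKey}\` is deprecated, please use \`${newKey}\` instead, we will remove \`${oldKey}\` in the LobeChat 1.0`,
    );
  }
};

warnDeprecatedEnv('CUSTOM_MODELS', 'OPENAI_MODEL_LIST');
warnDeprecatedEnv('OLLAMA_CUSTOM_MODELS', 'OLLAMA_MODEL_LIST');
warnDeprecatedEnv('OPENROUTER_CUSTOM_MODELS', 'OPENROUTER_MODEL_LIST');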
2 changes: 1 addition & 1 deletion src/const/settings/index.ts
@@ -108,7 +108,7 @@ export const DEFAULT_LLM_CONFIG: GlobalLLMConfig = {
   },
   openai: {
     apiKey: '',
-    enabled: false,
+    enabled: true,
     enabledModels: filterEnabledModels(OpenAIProvider),
   },
   openrouter: {
9 changes: 2 additions & 7 deletions src/store/global/slices/settings/selectors/modelConfig.ts
@@ -14,13 +14,8 @@ const providerConfig = (provider: string) => (s: GlobalStore) =>
   | GeneralModelProviderConfig
   | undefined;
 
-const providerEnabled = (provider: GlobalLLMProviderKey) => (s: GlobalStore) => {
-  // TODO: we need to migrate the 'openAI' key to 'openai'
-  // @ts-ignore
-  if (provider === 'openai') return true;
-
-  return currentSettings(s).languageModel[provider]?.enabled || false;
-};
+const providerEnabled = (provider: GlobalLLMProviderKey) => (s: GlobalStore) =>
+  currentSettings(s).languageModel[provider]?.enabled || false;
 
 const providerEnableModels = (provider: string) => (s: GlobalStore) => {
   if (!providerConfig(provider)(s)?.enabledModels) return;
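
With the hard-coded `openai` branch gone, `providerEnabled` reads the persisted `enabled` flag for every provider, which is what lets a user actually switch OpenAI off. A usage sketch follows, assuming a zustand-style `useGlobalStore` hook and that the selector is exported from this selectors module; those names and import paths are assumptions, only the selector body appears in this diff.

// Hypothetical consumer of the simplified selector; hook and export names
// are assumed for illustration, not taken from this commit.
import { useGlobalStore } from '@/store/global';
import { modelConfigSelectors } from '@/store/global/slices/settings/selectors/modelConfig';

const useOpenAIEnabled = () =>
  // Before this commit the selector always returned true for 'openai';
  // now it reflects the stored `enabled` flag like any other provider.
  useGlobalStore(modelConfigSelectors.providerEnabled('openai'));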
1 change: 1 addition & 0 deletions
@@ -43,6 +43,7 @@ const serverProviderModelCards =
  * define all the model list of providers
  */
 const providerModelList = (s: GlobalStore): ModelProviderCard[] => {
+  // if the chat model is config in the server side, use the server side model cards
   const openaiChatModels = serverProviderModelCards('openai')(s);
   const ollamaChatModels = serverProviderModelCards('ollama')(s);
   const openrouterChatModels = serverProviderModelCards('openrouter')(s);
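
The added comment states the precedence rule this selector applies: a model list configured on the server side replaces the built-in card's models for that provider. A small illustration of that precedence follows; `mergeProviderCard`, `builtinCard`, `ChatModelCard`, the `chatModels` field, and the import path are placeholders, not identifiers confirmed by this diff.

// Illustrative precedence only: server-configured chat models, when present,
// win over the provider card's built-in list.
import type { ChatModelCard, ModelProviderCard } from '@/types/llm';

const mergeProviderCard = (
  builtinCard: ModelProviderCard,
  serverChatModels?: ChatModelCard[],
): ModelProviderCard => ({
  ...builtinCard,
  chatModels: serverChatModels ?? builtinCard.chatModels,
});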
