Skip to content

Commit

Permalink
💄 style: improve groq location error
Browse files Browse the repository at this point in the history
  • Loading branch information
arvinxx committed May 9, 2024
1 parent 862c0ae commit 023c21b
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 7 deletions.
12 changes: 6 additions & 6 deletions src/config/modelProviders/groq.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,12 @@ import { ModelProviderCard } from '@/types/llm';
// ref https://console.groq.com/docs/models
const Groq: ModelProviderCard = {
chatModels: [
{
displayName: 'LLaMA3-3-70B',
enabled: true,
id: 'llama3-70b-8192',
tokens: 8192,
},
{
displayName: 'Mixtral-8x7b-Instruct-v0.1',
enabled: true,
Expand All @@ -21,12 +27,6 @@ const Groq: ModelProviderCard = {
id: 'llama3-8b-8192',
tokens: 8192,
},
{
displayName: 'LLaMA3-3-70B',
enabled: true,
id: 'llama3-70b-8192',
tokens: 8192,
},
{
displayName: 'LLaMA2-70b-chat',
id: 'llama2-70b-4096',
Expand Down
7 changes: 7 additions & 0 deletions src/libs/agent-runtime/groq/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,13 @@ import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';

export const LobeGroq = LobeOpenAICompatibleFactory({
baseURL: 'https://api.groq.com/openai/v1',
chatCompletion: {
handleError: (error) => {
      // 403 means the location is not supported
if (error.status === 403)
return { error, errorType: AgentRuntimeErrorType.LocationNotSupportError };
},
},
debug: {
chatCompletion: () => process.env.DEBUG_GROQ_CHAT_COMPLETION === '1',
},
Expand Down
13 changes: 12 additions & 1 deletion src/libs/agent-runtime/utils/openaiCompatibleFactory/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { ChatModelCard } from '@/types/llm';

import { LobeRuntimeAI } from '../../BaseAI';
import { ILobeAgentRuntimeErrorType } from '../../error';
import { ChatCompetitionOptions, ChatStreamPayload } from '../../types';
import { ChatCompetitionOptions, ChatCompletionErrorPayload, ChatStreamPayload } from '../../types';
import { AgentRuntimeError } from '../createError';
import { debugStream } from '../debugStream';
import { desensitizeUrl } from '../desensitizeUrl';
Expand All @@ -28,6 +28,7 @@ const CHAT_MODELS_BLOCK_LIST = [
interface OpenAICompatibleFactoryOptions {
baseURL?: string;
chatCompletion?: {
handleError?: (error: any) => Omit<ChatCompletionErrorPayload, 'provider'> | undefined;
handlePayload?: (payload: ChatStreamPayload) => OpenAI.ChatCompletionCreateParamsStreaming;
};
constructorOptions?: ClientOptions;
Expand Down Expand Up @@ -113,6 +114,16 @@ export const LobeOpenAICompatibleFactory = ({
}
}

if (chatCompletion?.handleError) {
const errorResult = chatCompletion.handleError(error);

if (errorResult)
throw AgentRuntimeError.chat({
...errorResult,
provider,
} as ChatCompletionErrorPayload);
}

const { errorResult, RuntimeError } = handleOpenAIError(error);

throw AgentRuntimeError.chat({
Expand Down

0 comments on commit 023c21b

Please sign in to comment.