diff --git a/docker/docker-compose.proxy.yml b/docker/docker-compose.proxy.yml index 2a68a630..ec31979f 100644 --- a/docker/docker-compose.proxy.yml +++ b/docker/docker-compose.proxy.yml @@ -49,6 +49,7 @@ services: - DATABASE_URL=${DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/bytebotdb} - BYTEBOT_DESKTOP_BASE_URL=${BYTEBOT_DESKTOP_BASE_URL:-http://bytebot-desktop:9990} - BYTEBOT_LLM_PROXY_URL=${BYTEBOT_LLM_PROXY_URL:-http://bytebot-llm-proxy:4000} + - BYTEBOT_LLM_PROXY_API_KEY=${BYTEBOT_LLM_PROXY_API_KEY:-} depends_on: - postgres networks: diff --git a/docs/deployment/litellm.mdx b/docs/deployment/litellm.mdx index 41d79d6b..9ba96f3f 100644 --- a/docs/deployment/litellm.mdx +++ b/docs/deployment/litellm.mdx @@ -51,7 +51,7 @@ Bytebot includes a pre-configured LiteLLM proxy service that makes it easy to us This automatically: - Starts the `bytebot-llm-proxy` service on port 4000 - - Configures the agent to use the proxy via `BYTEBOT_LLM_PROXY_URL` + - Configures the agent to use the proxy via `BYTEBOT_LLM_PROXY_URL` and the optional `BYTEBOT_LLM_PROXY_API_KEY` - Makes all configured models available through the proxy @@ -304,6 +304,7 @@ services: environment: # Point to your external LiteLLM instance - BYTEBOT_LLM_PROXY_URL=http://your-litellm-server:4000 + - BYTEBOT_LLM_PROXY_API_KEY=sk-your-key-here # ... 
rest of config ``` @@ -312,6 +313,7 @@ services: ```bash # Set the proxy URL before starting export BYTEBOT_LLM_PROXY_URL=http://your-litellm-server:4000 +export BYTEBOT_LLM_PROXY_API_KEY=sk-your-key-here # Start normally docker-compose -f docker/docker-compose.yml up -d @@ -331,6 +333,7 @@ docker run -d \ # Then start Bytebot with: export BYTEBOT_LLM_PROXY_URL=http://localhost:4000 +export BYTEBOT_LLM_PROXY_API_KEY=sk-your-key-here docker-compose up -d ``` diff --git a/helm/charts/bytebot-agent/templates/deployment.yaml b/helm/charts/bytebot-agent/templates/deployment.yaml index 059e8fa3..0dc35f8b 100644 --- a/helm/charts/bytebot-agent/templates/deployment.yaml +++ b/helm/charts/bytebot-agent/templates/deployment.yaml @@ -31,6 +31,10 @@ spec: - name: BYTEBOT_LLM_PROXY_URL value: {{ .Values.config.llmProxyUrl | default .Values.env.BYTEBOT_LLM_PROXY_URL | quote }} {{- end }} + {{- if or .Values.config.llmProxyApiKey .Values.env.BYTEBOT_LLM_PROXY_API_KEY }} + - name: BYTEBOT_LLM_PROXY_API_KEY + value: {{ .Values.config.llmProxyApiKey | default .Values.env.BYTEBOT_LLM_PROXY_API_KEY | quote }} + {{- end }} {{- /* Anthropic API Key */ -}} {{- if .Values.env.ANTHROPIC_API_KEY }} - name: ANTHROPIC_API_KEY diff --git a/helm/charts/bytebot-agent/values.yaml b/helm/charts/bytebot-agent/values.yaml index a231f1a0..985839aa 100644 --- a/helm/charts/bytebot-agent/values.yaml +++ b/helm/charts/bytebot-agent/values.yaml @@ -32,6 +32,7 @@ env: DATABASE_URL: "" BYTEBOT_DESKTOP_BASE_URL: "http://bytebot-desktop:9990" BYTEBOT_LLM_PROXY_URL: "" + BYTEBOT_LLM_PROXY_API_KEY: "" # Legacy API key values for backward compatibility ANTHROPIC_API_KEY: "" OPENAI_API_KEY: "" diff --git a/packages/bytebot-agent/src/proxy/proxy.service.ts b/packages/bytebot-agent/src/proxy/proxy.service.ts index 30e843f8..e5e87309 100644 --- a/packages/bytebot-agent/src/proxy/proxy.service.ts +++ b/packages/bytebot-agent/src/proxy/proxy.service.ts @@ -32,6 +32,7 @@ export class ProxyService implements 
BytebotAgentService { constructor(private readonly configService: ConfigService) { const proxyUrl = this.configService.get('BYTEBOT_LLM_PROXY_URL'); + const proxyApiKey = this.configService.get('BYTEBOT_LLM_PROXY_API_KEY'); if (!proxyUrl) { this.logger.warn( @@ -41,7 +42,7 @@ export class ProxyService implements BytebotAgentService { // Initialize OpenAI client with proxy configuration this.openai = new OpenAI({ - apiKey: 'dummy-key-for-proxy', + apiKey: proxyApiKey ?? 'dummy-key-for-proxy', baseURL: proxyUrl, }); } diff --git a/packages/bytebot-agent/src/tasks/tasks.controller.ts b/packages/bytebot-agent/src/tasks/tasks.controller.ts index 982c4a4f..8acac58e 100644 --- a/packages/bytebot-agent/src/tasks/tasks.controller.ts +++ b/packages/bytebot-agent/src/tasks/tasks.controller.ts @@ -25,6 +25,7 @@ const anthropicApiKey = process.env.ANTHROPIC_API_KEY; const openaiApiKey = process.env.OPENAI_API_KEY; const proxyUrl = process.env.BYTEBOT_LLM_PROXY_URL; +const proxyApiKey = process.env.BYTEBOT_LLM_PROXY_API_KEY; const models = [ ...(anthropicApiKey ? ANTHROPIC_MODELS : []), @@ -70,11 +71,18 @@ export class TasksController { async getModels() { if (proxyUrl) { try { + const headers: Record<string, string> = { + 'Content-Type': 'application/json', + }; + + if (proxyApiKey) { + // LiteLLM proxy is OpenAI-compatible → Bearer token expected + headers.Authorization = `Bearer ${proxyApiKey}`; + } + const response = await fetch(`${proxyUrl}/model/info`, { method: 'GET', - headers: { - 'Content-Type': 'application/json', - }, + headers, }); if (!response.ok) {