1 change: 1 addition & 0 deletions docker/docker-compose.proxy.yml
@@ -49,6 +49,7 @@ services:
- DATABASE_URL=${DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/bytebotdb}
- BYTEBOT_DESKTOP_BASE_URL=${BYTEBOT_DESKTOP_BASE_URL:-http://bytebot-desktop:9990}
- BYTEBOT_LLM_PROXY_URL=${BYTEBOT_LLM_PROXY_URL:-http://bytebot-llm-proxy:4000}
+ - BYTEBOT_LLM_PROXY_API_KEY=${BYTEBOT_LLM_PROXY_API_KEY:-}
depends_on:
- postgres
networks:
5 changes: 4 additions & 1 deletion docs/deployment/litellm.mdx
@@ -51,7 +51,7 @@ Bytebot includes a pre-configured LiteLLM proxy service that makes it easy to us

This automatically:
- Starts the `bytebot-llm-proxy` service on port 4000
- - Configures the agent to use the proxy via `BYTEBOT_LLM_PROXY_URL`
+ - Configures the agent to use the proxy via `BYTEBOT_LLM_PROXY_URL` and the optional `BYTEBOT_LLM_PROXY_API_KEY`
- Makes all configured models available through the proxy
</Step>

@@ -304,6 +304,7 @@ services:
environment:
# Point to your external LiteLLM instance
- BYTEBOT_LLM_PROXY_URL=http://your-litellm-server:4000
+ - BYTEBOT_LLM_PROXY_API_KEY=sk-your-key-here
# ... rest of config
```

@@ -312,6 +313,7 @@ services:
```bash
# Set the proxy URL before starting
export BYTEBOT_LLM_PROXY_URL=http://your-litellm-server:4000
+ export BYTEBOT_LLM_PROXY_API_KEY=sk-your-key-here

# Start normally
docker-compose -f docker/docker-compose.yml up -d
@@ -331,6 +333,7 @@ docker run -d \

# Then start Bytebot with:
export BYTEBOT_LLM_PROXY_URL=http://localhost:4000
+ export BYTEBOT_LLM_PROXY_API_KEY=sk-your-key-here
docker-compose up -d
```

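A quick way to confirm the exported key is actually accepted is to list models through the proxy's OpenAI-compatible endpoint. Below is a minimal standalone sketch of such a check, assuming Node 18+ (for global `fetch`) and the same two environment variables exported above; the `verify-proxy.ts` name and `listModels` helper are illustrative, not part of Bytebot.

```typescript
// verify-proxy.ts: hypothetical smoke test for the setup documented above.
const proxyUrl = process.env.BYTEBOT_LLM_PROXY_URL ?? 'http://localhost:4000';
const proxyApiKey = process.env.BYTEBOT_LLM_PROXY_API_KEY;

async function listModels(): Promise<string[]> {
  const res = await fetch(`${proxyUrl}/v1/models`, {
    // Send the Bearer token only when a key is configured.
    headers: proxyApiKey ? { Authorization: `Bearer ${proxyApiKey}` } : undefined,
  });
  if (!res.ok) {
    throw new Error(`proxy responded ${res.status} ${res.statusText}`);
  }
  const body = (await res.json()) as { data: { id: string }[] };
  return body.data.map((m) => m.id);
}

listModels().then(console.log).catch(console.error);
```

If the proxy enforces a master key and the exported key is wrong or missing, the request fails with a 401 and the error above fires.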
4 changes: 4 additions & 0 deletions helm/charts/bytebot-agent/templates/deployment.yaml
@@ -31,6 +31,10 @@ spec:
- name: BYTEBOT_LLM_PROXY_URL
value: {{ .Values.config.llmProxyUrl | default .Values.env.BYTEBOT_LLM_PROXY_URL | quote }}
{{- end }}
+ {{- if or .Values.config.llmProxyApiKey .Values.env.BYTEBOT_LLM_PROXY_API_KEY }}
+ - name: BYTEBOT_LLM_PROXY_API_KEY
+ value: {{ .Values.config.llmProxyApiKey | default .Values.env.BYTEBOT_LLM_PROXY_API_KEY | quote }}
+ {{- end }}
{{- /* Anthropic API Key */ -}}
{{- if .Values.env.ANTHROPIC_API_KEY }}
- name: ANTHROPIC_API_KEY
1 change: 1 addition & 0 deletions helm/charts/bytebot-agent/values.yaml
@@ -32,6 +32,7 @@ env:
DATABASE_URL: ""
BYTEBOT_DESKTOP_BASE_URL: "http://bytebot-desktop:9990"
BYTEBOT_LLM_PROXY_URL: ""
+ BYTEBOT_LLM_PROXY_API_KEY: ""
# Legacy API key values for backward compatibility
ANTHROPIC_API_KEY: ""
OPENAI_API_KEY: ""
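With both chart files changed, the key can be supplied either through `env.BYTEBOT_LLM_PROXY_API_KEY` as shown here or through `config.llmProxyApiKey`, which the deployment template prefers when both are set; for example, `helm upgrade --install bytebot-agent helm/charts/bytebot-agent --set config.llmProxyApiKey=sk-your-key-here`. Note that the template injects the key as a plain environment variable, so it appears in the rendered pod spec; sourcing it from a Secret via `valueFrom` could be a follow-up hardening step.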
3 changes: 2 additions & 1 deletion packages/bytebot-agent/src/proxy/proxy.service.ts
@@ -32,6 +32,7 @@ export class ProxyService implements BytebotAgentService {

constructor(private readonly configService: ConfigService) {
const proxyUrl = this.configService.get<string>('BYTEBOT_LLM_PROXY_URL');
+ const proxyApiKey = this.configService.get<string>('BYTEBOT_LLM_PROXY_API_KEY');

if (!proxyUrl) {
this.logger.warn(
@@ -41,7 +42,7 @@ export class ProxyService implements BytebotAgentService {

// Initialize OpenAI client with proxy configuration
this.openai = new OpenAI({
- apiKey: 'dummy-key-for-proxy',
+ apiKey: proxyApiKey ?? 'dummy-key-for-proxy',
baseURL: proxyUrl,
});
}
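One subtlety in the changed constructor: `??` falls back only on `null` or `undefined`, so an empty string, which is exactly what the compose default `${BYTEBOT_LLM_PROXY_API_KEY:-}` yields, is passed through to the OpenAI client rather than replaced by the dummy key. A minimal sketch of the difference (illustration only, not Bytebot code):

```typescript
// Nullish coalescing vs. logical OR for an env-sourced key.
const fromEnv: string | undefined = process.env.BYTEBOT_LLM_PROXY_API_KEY;

const nullishFallback = fromEnv ?? 'dummy-key-for-proxy'; // '' stays '': ?? only catches null/undefined
const truthyFallback = fromEnv || 'dummy-key-for-proxy'; // '' is falsy, so the dummy key is substituted

console.log({ nullishFallback, truthyFallback });
```

Whether an empty-string key matters depends on whether the proxy enforces authentication; with no master key set, LiteLLM generally ignores the header.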
14 changes: 11 additions & 3 deletions packages/bytebot-agent/src/tasks/tasks.controller.ts
@@ -25,6 +25,7 @@ const anthropicApiKey = process.env.ANTHROPIC_API_KEY;
const openaiApiKey = process.env.OPENAI_API_KEY;

const proxyUrl = process.env.BYTEBOT_LLM_PROXY_URL;
+ const proxyApiKey = process.env.BYTEBOT_LLM_PROXY_API_KEY;

const models = [
...(anthropicApiKey ? ANTHROPIC_MODELS : []),
@@ -70,11 +71,18 @@ export class TasksController {
async getModels() {
if (proxyUrl) {
try {
+ const headers: Record<string, string> = {
+ 'Content-Type': 'application/json',
+ };
+
+ if (proxyApiKey) {
+ // LiteLLM proxy is OpenAI-compatible → Bearer token expected
+ headers.Authorization = `Bearer ${proxyApiKey}`;
+ }
+
const response = await fetch(`${proxyUrl}/model/info`, {
method: 'GET',
- headers: {
- 'Content-Type': 'application/json',
- },
+ headers,
});

if (!response.ok) {
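Extracted from the controller for clarity, the new request flow amounts to the helper below: a sketch under the same assumptions as the diff (Node 18+ global `fetch`); the `fetchModelInfo` name is illustrative.

```typescript
// Sketch of the conditional-auth request the controller now performs.
async function fetchModelInfo(
  proxyUrl: string,
  proxyApiKey?: string,
): Promise<unknown> {
  const headers: Record<string, string> = {
    'Content-Type': 'application/json',
  };
  if (proxyApiKey) {
    // Same Bearer scheme the OpenAI-compatible proxy expects.
    headers.Authorization = `Bearer ${proxyApiKey}`;
  }

  const response = await fetch(`${proxyUrl}/model/info`, {
    method: 'GET',
    headers,
  });
  if (!response.ok) {
    throw new Error(`proxy responded ${response.status}`);
  }
  return response.json();
}

// Usage, mirroring the env variables the controller reads:
// fetchModelInfo(process.env.BYTEBOT_LLM_PROXY_URL!, process.env.BYTEBOT_LLM_PROXY_API_KEY);
```

Because the `Authorization` header is added only when `BYTEBOT_LLM_PROXY_API_KEY` is set, deployments without a key keep their existing unauthenticated behavior.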