Skip to content

Commit 8671527

Browse files
authored
feat: Enhance BAML configuration and proxy settings in VSCode extension (#1489)
- Update event listeners to support configuration updates - Add proxy URL and settings display in environment variables panel <!-- ELLIPSIS_HIDDEN --> ---- > [!IMPORTANT] > Enhance BAML configuration and proxy settings in VSCode extension by updating event listeners, adding proxy display, and improving configuration management. > > - **Behavior**: > - Update event listeners in `EventListener.tsx` to handle `baml_settings_updated` command and update BAML configuration. > - Add proxy URL and settings display in `env-vars.tsx` with a checkbox to toggle proxy settings. > - Modify error messages in `request.rs` to include more descriptive text and handle proxy suggestions for `wasm32`. > - **Configuration**: > - Introduce `bamlConfig` and `BamlConfigAtom` in `bamlConfig.ts` to manage BAML settings. > - Update `vscode-rpc.ts` and `vscode.ts` to handle `SET_PROXY_SETTINGS` command. > - Implement `bamlConfigSchema` in `bamlConfig.ts` to validate configuration structure. > - **Misc**: > - Format code in `request.rs` and `stream_request.rs` for better readability. > - Add `baml_settings_updated` request handling in `server.ts` and `index.ts` to sync configuration changes. > - Remove unused `getIsProxyEnabled` function from `vscode.ts`. > > <sup>This description was created by </sup>[<img alt="Ellipsis" src="https://img.shields.io/badge/Ellipsis-blue?color=175173">](https://www.ellipsis.dev?ref=BoundaryML%2Fbaml&utm_source=github&utm_medium=referral)<sup> for da5ea33. It will automatically update as commits are pushed.</sup> <!-- ELLIPSIS_HIDDEN -->
1 parent d30220d commit 8671527

File tree

15 files changed

+329
-160
lines changed

15 files changed

+329
-160
lines changed

engine/baml-runtime/src/internal/llm_client/primitive/request.rs

Lines changed: 50 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ pub async fn make_request(
5454
start_time: system_now,
5555
request_options: client.request_options().clone(),
5656
latency: instant_now.elapsed(),
57-
message: format!("{:#?}", e),
57+
message: format!("Failed to create request builder: {:#?}", e),
5858
code: ErrorCode::Other(2),
5959
}));
6060
}
@@ -70,7 +70,7 @@ pub async fn make_request(
7070
start_time: system_now,
7171
request_options: client.request_options().clone(),
7272
latency: instant_now.elapsed(),
73-
message: format!("{:#?}", e),
73+
message: format!("Failed to build request: {:#?}", e),
7474
code: ErrorCode::Other(2),
7575
}));
7676
}
@@ -86,8 +86,22 @@ pub async fn make_request(
8686
start_time: system_now,
8787
request_options: client.request_options().clone(),
8888
latency: instant_now.elapsed(),
89-
message: format!("{:?}", e),
90-
code: ErrorCode::Other(2),
89+
message: {
90+
#[cfg(not(target_arch = "wasm32"))]
91+
{
92+
format!("{}", e.to_string())
93+
}
94+
#[cfg(target_arch = "wasm32")]
95+
{
96+
format!(
97+
"{}\n\nIf you haven't yet, try enabling the proxy (See API Keys button)",
98+
e.to_string()
99+
)
100+
}
101+
},
102+
code: e
103+
.status()
104+
.map_or(ErrorCode::Other(2), |s| ErrorCode::from_status(s)),
91105
}));
92106
}
93107
};
@@ -149,9 +163,37 @@ pub async fn make_parsed_request(
149163
};
150164

151165
match response_type {
152-
ResponseType::OpenAI => super::openai::response_handler::parse_openai_response(client, prompt, response_body, system_now, instant_now, model_name),
153-
ResponseType::Anthropic => super::anthropic::response_handler::parse_anthropic_response(client, prompt, response_body, system_now, instant_now, model_name),
154-
ResponseType::Google => super::google::response_handler::parse_google_response(client, prompt, response_body, system_now, instant_now, model_name),
155-
ResponseType::Vertex => super::vertex::response_handler::parse_vertex_response(client, prompt, response_body, system_now, instant_now, model_name),
166+
ResponseType::OpenAI => super::openai::response_handler::parse_openai_response(
167+
client,
168+
prompt,
169+
response_body,
170+
system_now,
171+
instant_now,
172+
model_name,
173+
),
174+
ResponseType::Anthropic => super::anthropic::response_handler::parse_anthropic_response(
175+
client,
176+
prompt,
177+
response_body,
178+
system_now,
179+
instant_now,
180+
model_name,
181+
),
182+
ResponseType::Google => super::google::response_handler::parse_google_response(
183+
client,
184+
prompt,
185+
response_body,
186+
system_now,
187+
instant_now,
188+
model_name,
189+
),
190+
ResponseType::Vertex => super::vertex::response_handler::parse_vertex_response(
191+
client,
192+
prompt,
193+
response_body,
194+
system_now,
195+
instant_now,
196+
model_name,
197+
),
156198
}
157199
}

engine/baml-runtime/src/internal/llm_client/primitive/stream_request.rs

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,17 @@ use crate::internal::llm_client::{
77
use anyhow::{Context, Result};
88
use baml_types::BamlMap;
99
use eventsource_stream::Eventsource;
10-
use futures::StreamExt;
10+
use futures::{StreamExt, TryStreamExt};
1111
use internal_baml_jinja::RenderedChatMessage;
1212
use reqwest::Response;
1313
use serde::de::DeserializeOwned;
1414

1515
use super::{
16-
anthropic::response_handler::scan_anthropic_response_stream, google::response_handler::scan_google_response_stream, openai::response_handler::scan_openai_response_stream, request::{make_request, to_prompt, RequestBuilder, ResponseType}, vertex::response_handler::scan_vertex_response_stream
16+
anthropic::response_handler::scan_anthropic_response_stream,
17+
google::response_handler::scan_google_response_stream,
18+
openai::response_handler::scan_openai_response_stream,
19+
request::{make_request, to_prompt, RequestBuilder, ResponseType},
20+
vertex::response_handler::scan_vertex_response_stream,
1721
};
1822

1923
pub async fn make_stream_request(

typescript/playground-common/src/baml_wasm_web/EventListener.tsx

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ import { CodeMirrorDiagnosticsAtom } from '@/shared/baml-project-panel/codemirro
2020
import { AlertTriangle, XCircle } from 'lucide-react'
2121
import { CheckCircle } from 'lucide-react'
2222
import { useDebounce, useDebounceCallback } from '@react-hook/debounce'
23+
import { bamlConfig, BamlConfigAtom } from './bamlConfig'
2324

2425
export const hasClosedEnvVarsDialogAtom = atomWithStorage<boolean>(
2526
'has-closed-env-vars-dialog',
@@ -84,6 +85,7 @@ export const EventListener: React.FC<{ children: React.ReactNode }> = ({ childre
8485

8586
const [selectedFunc, setSelectedFunction] = useAtom(selectedFunctionAtom)
8687
const setSelectedTestcase = useSetAtom(selectedTestcaseAtom)
88+
const setBamlConfig = useSetAtom(bamlConfig)
8789
const [bamlCliVersion, setBamlCliVersion] = useAtom(bamlCliVersionAtom)
8890
const { setRunningTests } = useRunTests()
8991
const wasm = useAtomValue(wasmAtom)
@@ -156,6 +158,10 @@ export const EventListener: React.FC<{ children: React.ReactNode }> = ({ childre
156158
command: 'baml_cli_version'
157159
content: string
158160
}
161+
| {
162+
command: 'baml_settings_updated'
163+
content: BamlConfigAtom
164+
}
159165
| {
160166
command: 'run_test'
161167
content: { test_name: string }
@@ -184,6 +190,10 @@ export const EventListener: React.FC<{ children: React.ReactNode }> = ({ childre
184190
updateCursor(content.cursor)
185191
}
186192
break
193+
case 'baml_settings_updated':
194+
console.log('baml_settings_updated', content)
195+
setBamlConfig(content)
196+
break
187197
case 'baml_cli_version':
188198
console.log('baml_cli_version', content)
189199
setBamlCliVersion(content)
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
// COPIED FROM ./vscode-ext/packages/vscode/src/plugins/language-server/bamlConfig.ts
2+
3+
import { atom } from 'jotai'
4+
import { z } from 'zod'
5+
export const bamlConfigSchema = z
6+
.object({
7+
cliPath: z.optional(z.string().nullable()).default(null),
8+
generateCodeOnSave: z.enum(['never', 'always']).default('always'),
9+
restartTSServerOnSave: z.boolean().default(false),
10+
enablePlaygroundProxy: z.boolean().default(true),
11+
envCommand: z.string().default('env'),
12+
fileWatcher: z.boolean().default(false),
13+
trace: z.object({
14+
server: z.enum(['off', 'messages', 'verbose']).default('off'),
15+
}),
16+
bamlPanelOpen: z.boolean().default(false),
17+
})
18+
.partial()
19+
type BamlConfig = z.infer<typeof bamlConfigSchema>
20+
21+
export type BamlConfigAtom = { config: BamlConfig | null; cliVersion: string | null }
22+
23+
export const bamlConfig = atom<BamlConfigAtom>({
24+
config: null,
25+
cliVersion: null,
26+
})

typescript/playground-common/src/shared/baml-project-panel/atoms.ts

Lines changed: 26 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import { type ICodeBlock } from './types'
66
import { vscodeLocalStorageStore } from './Jotai'
77
import { orchIndexAtom } from './playground-panel/atoms-orch-graph'
88
import { vscode } from './vscode'
9+
import { bamlConfig } from '../../baml_wasm_web/bamlConfig'
910

1011
const wasmAtomAsync = atom(async () => {
1112
const wasm = await import('@gloo-ai/baml-schema-wasm-web/baml_schema_build')
@@ -157,21 +158,12 @@ export const generatedFilesByLangAtom = atomFamily((lang: ICodeBlock['language']
157158

158159
export const isPanelVisibleAtom = atom(false)
159160

160-
const vscodeSettingsAtom = unwrap(
161-
atom(async () => {
162-
try {
163-
const res = await vscode.getIsProxyEnabled()
164-
return {
165-
enablePlaygroundProxy: res,
166-
}
167-
} catch (e) {
168-
console.error(`Error occurred while getting vscode settings:\n${JSON.stringify(e)}`)
169-
return {
170-
enablePlaygroundProxy: true,
171-
}
172-
}
173-
}),
174-
)
161+
const vscodeSettingsAtom = atom<{ enablePlaygroundProxy: boolean }>((get) => {
162+
const config = get(bamlConfig)
163+
return {
164+
enablePlaygroundProxy: config.config?.enablePlaygroundProxy ?? true,
165+
}
166+
})
175167

176168
const playgroundPortAtom = unwrap(
177169
atom(async () => {
@@ -185,6 +177,17 @@ const playgroundPortAtom = unwrap(
185177
}),
186178
)
187179

180+
export const proxyUrlAtom = atom((get) => {
181+
const vscodeSettings = get(vscodeSettingsAtom)
182+
const port = get(playgroundPortAtom)
183+
const proxyUrl = port && port !== 0 ? `http://localhost:${port}` : undefined
184+
const proxyEnabled = !!vscodeSettings?.enablePlaygroundProxy
185+
return {
186+
proxyEnabled,
187+
proxyUrl,
188+
}
189+
})
190+
188191
export const resetEnvKeyValuesAtom = atom(null, (get, set) => {
189192
set(envKeyValueStorage, [])
190193
})
@@ -233,23 +236,24 @@ export const envVarsAtom = atom(
233236
// NextJS environment doesnt have vscode settings, and proxy is always enabled
234237
return Object.fromEntries(defaultEnvKeyValues.map(([k, v]) => [k, v]))
235238
} else {
236-
const vscodeSettings = get(vscodeSettingsAtom)
237-
console.log('vscodeSettings', vscodeSettings)
238-
if (vscodeSettings?.enablePlaygroundProxy !== undefined && !vscodeSettings?.enablePlaygroundProxy) {
239+
const { proxyEnabled, proxyUrl } = get(proxyUrlAtom)
240+
if (!proxyEnabled) {
239241
// filter it out
240242
const envKeyValues = get(envKeyValuesAtom)
241243
return Object.fromEntries(envKeyValues.map(([k, v]) => [k, v]).filter(([k]) => k !== 'BOUNDARY_PROXY_URL'))
242244
}
243245

244246
const envKeyValues = get(envKeyValuesAtom)
245-
const port = get(playgroundPortAtom)
247+
if (proxyUrl === undefined) {
248+
return Object.fromEntries(envKeyValues.map(([k, v]) => [k, v]).filter(([k]) => k !== 'BOUNDARY_PROXY_URL'))
249+
}
246250
const entries = envKeyValues.map(([k, v]) => {
247-
if (k === 'BOUNDARY_PROXY_URL' && port !== 0) {
248-
return [k, `http://localhost:${port}`]
251+
if (k === 'BOUNDARY_PROXY_URL') {
252+
return [k, proxyUrl]
249253
}
250254
return [k, v]
251255
})
252-
return Object.fromEntries(entries)
256+
return Object.fromEntries(entries.filter((e) => e !== undefined))
253257
}
254258
},
255259
(get, set, newEnvVars: Record<string, string>) => {

0 commit comments

Comments (0)