Commit 4b338f6

fix(frontend): add support for 'mode' discriminator in oneOf rendering
Update ArgInput.svelte to properly handle oneOf schemas that use 'mode' as the discriminator field, in addition to the existing 'kind' and 'label' support.

Changes:
- Updated tagKey derivation to check for 'mode' first, then 'kind', then 'label'
- Added 'mode' to the onOneOfChange function to track mode changes
- Added 'mode' to the list of keys excluded from enum validation
- Added 'mode' to hiddenArgs to prevent it from being shown in the form
- Added title fields to the history oneOf variants in flowInfers.ts

This allows the AI agent's history field to properly render with toggle buttons for 'auto' and 'manual' modes.

Co-authored-by: centdix <centdix@users.noreply.github.com>
1 parent 714b54e commit 4b338f6
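
For context, here is a minimal TypeScript sketch (illustrative only, not code from the repository) of the two value shapes the history field can hold; the 'mode' property is the discriminator the form now toggles on.

// Illustrative only: the two shapes of the AI agent history value.
// Field names mirror the schema in flowInfers.ts; this type is hypothetical.
type HistoryValue =
  | { mode: 'auto'; context_length: number }
  | { mode: 'manual'; messages: { role: string; content: string }[] }

const autoHistory: HistoryValue = { mode: 'auto', context_length: 10 }
const manualHistory: HistoryValue = {
  mode: 'manual',
  messages: [{ role: 'user', content: 'Hello' }]
}

// The oneOf renderer picks the variant whose title matches the stored mode,
// so these values surface as the 'auto' and 'manual' toggle buttons.
console.log(autoHistory.mode, manualHistory.mode)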

3 files changed: +35 -22 lines

backend/windmill-worker/src/ai_executor.rs

Lines changed: 7 additions & 7 deletions
@@ -424,14 +424,14 @@ pub async fn run_agent(

    // Load messages based on history mode
    if matches!(output_type, OutputType::Text) {
-        match history {
+        match &history {
            Some(History::Manual { messages: manual_messages }) => {
                // Use explicitly provided messages (bypass memory)
                if !manual_messages.is_empty() {
-                    messages.extend(manual_messages);
+                    messages.extend(manual_messages.clone());
                }
            }
-            Some(History::Auto { context_length }) if context_length > 0 => {
+            Some(History::Auto { context_length }) if *context_length > 0 => {
                // Auto mode: load from memory
                if let Some(step_id) = job.flow_step_id.as_deref() {
                    if let Some(memory_id) = flow_context
@@ -443,7 +443,7 @@ pub async fn run_agent(
                        match read_from_memory(db, &job.workspace_id, memory_id, step_id).await {
                            Ok(Some(loaded_messages)) => {
                                // Take the last n messages
-                                let start_idx = loaded_messages.len().saturating_sub(context_length);
+                                let start_idx = loaded_messages.len().saturating_sub(*context_length);
                                let mut messages_to_load = loaded_messages[start_idx..].to_vec();
                                let first_non_tool_message_index =
                                    messages_to_load.iter().position(|m| m.role != "tool");
@@ -934,16 +934,16 @@ pub async fn run_agent(
    // Skip memory persistence if using manual messages (bypass memory entirely)
    // final_messages contains the complete history (old messages + new ones)
    if matches!(output_type, OutputType::Text) && !use_manual_messages {
-        if let Some(History::Auto { context_length }) = history {
-            if context_length > 0 {
+        if let Some(History::Auto { context_length }) = &history {
+            if *context_length > 0 {
                if let Some(step_id) = job.flow_step_id.as_deref() {
                    // Extract OpenAIMessages from final_messages
                    let all_messages: Vec<OpenAIMessage> =
                        final_messages.iter().map(|m| m.message.clone()).collect();

                    if !all_messages.is_empty() {
                        // Keep only the last n messages
-                        let start_idx = all_messages.len().saturating_sub(context_length);
+                        let start_idx = all_messages.len().saturating_sub(*context_length);
                        let messages_to_persist = all_messages[start_idx..].to_vec();

                        if let Some(memory_id) = flow_context.flow_status.and_then(|fs| fs.memory_id) {

frontend/src/lib/components/ArgInput.svelte

Lines changed: 20 additions & 9 deletions
@@ -202,7 +202,11 @@

    let oneOfSelected: string | undefined = $state(undefined)
    let tagKey = $derived(
-        oneOf?.find((o) => Object.keys(o.properties ?? {})?.includes('kind')) ? 'kind' : 'label'
+        oneOf?.find((o) => Object.keys(o.properties ?? {})?.includes('mode'))
+            ? 'mode'
+            : oneOf?.find((o) => Object.keys(o.properties ?? {})?.includes('kind'))
+                ? 'kind'
+                : 'label'
    )
    async function updateOneOfSelected(oneOf: SchemaProperty[] | undefined) {
        if (
@@ -226,10 +230,13 @@
    function onOneOfChange() {
        const label = value?.['label']
        const kind = value?.['kind']
+        const mode = value?.['mode']
        if (label && oneOf && oneOf.some((o) => o.title == label) && oneOfSelected != label) {
            oneOfSelected = label
        } else if (kind && oneOf && oneOf.some((o) => o.title == kind) && oneOfSelected != kind) {
            oneOfSelected = kind
+        } else if (mode && oneOf && oneOf.some((o) => o.title == mode) && oneOfSelected != mode) {
+            oneOfSelected = mode
        }
    }

@@ -1073,7 +1080,7 @@
                for (const key of newValueKeys) {
                    // Check if there is a select (enum) in the newly selected oneOf and if the current value is not in the enum, skip it
                    if (
-                        !['kind', 'label'].includes(key) &&
+                        !['kind', 'label', 'mode'].includes(key) &&
                        selectedObjProperties[key]?.enum &&
                        value &&
                        value[key] !== undefined &&
@@ -1083,9 +1090,11 @@
                    }
                    toKeep[key] = value[key]
                }
-                const tagKey = oneOf.find((o) => Object.keys(o.properties ?? {}).includes('kind'))
-                    ? 'kind'
-                    : 'label'
+                const tagKey = oneOf.find((o) => Object.keys(o.properties ?? {}).includes('mode'))
+                    ? 'mode'
+                    : oneOf.find((o) => Object.keys(o.properties ?? {}).includes('kind'))
+                        ? 'kind'
+                        : 'label'
                value = { ...toKeep, [tagKey]: detail }
            }}
        >
@@ -1122,9 +1131,11 @@
                }
                bind:args={value}
                hiddenArgs={[
-                    oneOf?.find((o) => Object.keys(o.properties ?? {}).includes('kind'))
-                        ? 'kind'
-                        : 'label'
+                    oneOf?.find((o) => Object.keys(o.properties ?? {}).includes('mode'))
+                        ? 'mode'
+                        : oneOf?.find((o) => Object.keys(o.properties ?? {}).includes('kind'))
+                            ? 'kind'
+                            : 'label'
                ]}
                on:reorder={(e) => {
                    if (oneOf && oneOf[objIdx]) {
@@ -1143,7 +1154,7 @@
                {disablePortal}
                {disabled}
                {prettifyHeader}
-                hiddenArgs={['label', 'kind']}
+                hiddenArgs={['label', 'kind', 'mode']}
                schema={{
                    properties: obj.properties,
                    order: obj.order,
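
The repeated ternaries above all encode one precedence rule. As a standalone sketch (hypothetical helper, not part of the component), the derivation reads:

// Hypothetical helper mirroring the precedence used in ArgInput.svelte:
// prefer 'mode', then 'kind', then fall back to 'label' as the discriminator.
type OneOfVariant = { title?: string; properties?: Record<string, unknown> }

function deriveTagKey(oneOf: OneOfVariant[] | undefined): 'mode' | 'kind' | 'label' {
  const hasKey = (key: string) =>
    oneOf?.some((o) => Object.keys(o.properties ?? {}).includes(key)) ?? false
  return hasKey('mode') ? 'mode' : hasKey('kind') ? 'kind' : 'label'
}

// A oneOf whose variants declare a 'mode' property resolves to 'mode'.
console.log(deriveTagKey([{ title: 'auto', properties: { mode: {} } }])) // 'mode'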

frontend/src/lib/components/flows/flowInfers.ts

Lines changed: 8 additions & 6 deletions
@@ -39,7 +39,8 @@ export const AI_AGENT_SCHEMA = {
        'Configure how conversation history is managed. Choose "auto" to automatically store and load messages from memory (up to N last messages), or "manual" to provide an explicit array of conversation messages. When using manual mode, memory is bypassed entirely - messages are not loaded from or saved to memory. The system_prompt (if provided) is always prepended, and user_message (if provided) is always appended as the final message.',
      oneOf: [
        {
-          type: 'object' as const,
+          type: 'object',
+          title: 'auto',
          properties: {
            mode: {
              type: 'string',
@@ -59,7 +60,8 @@ export const AI_AGENT_SCHEMA = {
            'When no S3 storage is configured in your workspace settings, memory will be stored in database, which implies a limit of 100KB per memory entry. If you need to store more messages, you should use S3 storage in your workspace settings.'
        },
        {
-          type: 'object' as const,
+          type: 'object',
+          title: 'manual',
          properties: {
            mode: {
              type: 'string',
@@ -71,7 +73,7 @@ export const AI_AGENT_SCHEMA = {
              type: 'array',
              description: 'Array of conversation messages to use as history',
              items: {
-                type: 'object' as const,
+                type: 'object',
                properties: {
                  role: {
                    type: 'string',
@@ -84,12 +86,12 @@ export const AI_AGENT_SCHEMA = {
                    type: 'array',
                    nullable: true,
                    items: {
-                      type: 'object' as const,
+                      type: 'object',
                      properties: {
                        id: { type: 'string' },
                        type: { type: 'string' },
                        function: {
-                          type: 'object' as const,
+                          type: 'object',
                          properties: {
                            name: { type: 'string' },
                            arguments: { type: 'string' }
@@ -112,7 +114,7 @@ export const AI_AGENT_SCHEMA = {
        }
      ],
      showExpr: "fields.output_type === 'text'"
-    },
+    } as any,
    output_schema: {
      type: 'object',
      description: 'JSON schema that the AI agent will follow for its response format.',
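
The added title fields matter because the selection logic in ArgInput.svelte matches the stored discriminator value against each variant's title. A rough TypeScript sketch (hypothetical, simplified from the actual component) of that dependency:

// Hypothetical sketch: a variant is only selectable if its title equals the
// stored discriminator value, which is why the 'auto' and 'manual' titles were added.
type Variant = { title?: string }

function selectVariant(variants: Variant[], mode: string | undefined): string | undefined {
  return mode && variants.some((o) => o.title == mode) ? mode : undefined
}

console.log(selectVariant([{ title: 'auto' }, { title: 'manual' }], 'auto')) // 'auto'
console.log(selectVariant([{}, {}], 'auto')) // undefined: no title, no match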
