Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
200 changes: 200 additions & 0 deletions core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ mod unit_tests {
use crate::models::workflow::*;
use crate::models::task::*;
use crate::models::map::*;
use serde_json::json;

#[test]
fn create_workflow() {
Expand Down Expand Up @@ -37,4 +38,203 @@ mod unit_tests {
assert_eq!(workflow.document.summary, summary);
}

#[test]
fn test_for_loop_definition_each_field_deserialization() {
    // Verifies that ForLoopDefinition maps the JSON keys "each" and "in"
    // onto the `each` and `in_` struct fields during deserialization.
    let payload = serde_json::json!({
        "each": "item",
        "in": ".items"
    });

    let for_loop: ForLoopDefinition =
        serde_json::from_value(payload).unwrap_or_else(|e| {
            panic!(
                "Failed to deserialize ForLoopDefinition with 'each' field: {}",
                e
            )
        });

    assert_eq!(for_loop.each, "item", "The 'each' field should be 'item'");
    assert_eq!(for_loop.in_, ".items", "The 'in' field should be '.items'");
}

#[test]
fn test_for_task_deserialization() {
    // A valid For task: it carries both a "for" loop definition and a
    // "do" block, so the custom TaskDefinition deserializer must select
    // the For variant even though a plain Do task also has a "do" key.
    let for_task_json = json!({
        "for": {
            "each": "item",
            "in": ".items"
        },
        "do": [
            {
                "processItem": {
                    "call": "processFunction",
                    "with": {
                        "item": "${ .item }"
                    }
                }
            }
        ]
    });

    // The value is consumed exactly once, so no `.clone()` is needed.
    let result: Result<TaskDefinition, _> = serde_json::from_value(for_task_json);

    match result {
        Ok(TaskDefinition::For(for_def)) => {
            assert_eq!(for_def.for_.each, "item");
            assert_eq!(for_def.for_.in_, ".items");
            assert_eq!(for_def.do_.entries.len(), 1);
            let has_process_item = for_def
                .do_
                .entries
                .iter()
                .any(|entry| entry.contains_key("processItem"));
            assert!(
                has_process_item,
                "For task should contain processItem subtask"
            );
        }
        Ok(TaskDefinition::Do(_)) => {
            panic!("For task incorrectly deserialized as DoTaskDefinition");
        }
        Ok(other) => {
            panic!("For task deserialized as unexpected variant: {:?}", other);
        }
        Err(e) => {
            panic!("Failed to deserialize For task: {}", e);
        }
    }
}

#[test]
fn test_do_task_deserialization() {
    // A plain Do task with two sequential steps must deserialize to the
    // Do variant and preserve both entries.
    let do_task_json = json!({
        "do": [
            {
                "step1": {
                    "call": "function1"
                }
            },
            {
                "step2": {
                    "call": "function2"
                }
            }
        ]
    });

    match serde_json::from_value::<TaskDefinition>(do_task_json) {
        Ok(TaskDefinition::Do(do_def)) => {
            let entries = &do_def.do_.entries;
            assert_eq!(entries.len(), 2);
            assert!(
                entries.iter().any(|entry| entry.contains_key("step1")),
                "Do task should contain step1"
            );
            assert!(
                entries.iter().any(|entry| entry.contains_key("step2")),
                "Do task should contain step2"
            );
        }
        Ok(other) => panic!("Do task deserialized as unexpected variant: {:?}", other),
        Err(e) => panic!("Failed to deserialize Do task: {}", e),
    }
}

#[test]
fn test_for_task_with_while_condition() {
    // A For task that also sets the optional "at" index variable and a
    // "while" continuation condition; all three must round into the
    // corresponding fields of the For variant.
    let for_task_json = json!({
        "for": {
            "each": "user",
            "in": ".users",
            "at": "index"
        },
        "while": "${ .index < 10 }",
        "do": [
            {
                "notifyUser": {
                    "call": "notifyUser",
                    "with": {
                        "user": "${ .user }",
                        "index": "${ .index }"
                    }
                }
            }
        ]
    });

    // The value is consumed exactly once, so no `.clone()` is needed.
    let result: Result<TaskDefinition, _> = serde_json::from_value(for_task_json);

    match result {
        Ok(TaskDefinition::For(for_def)) => {
            assert_eq!(for_def.for_.each, "user");
            assert_eq!(for_def.for_.in_, ".users");
            assert_eq!(for_def.for_.at, Some("index".to_string()));
            assert_eq!(for_def.while_, Some("${ .index < 10 }".to_string()));
            assert_eq!(for_def.do_.entries.len(), 1);
        }
        Ok(TaskDefinition::Do(_)) => {
            panic!("For task incorrectly deserialized as DoTaskDefinition");
        }
        Ok(other) => {
            panic!("For task deserialized as unexpected variant: {:?}", other);
        }
        Err(e) => {
            panic!("Failed to deserialize For task with while: {}", e);
        }
    }
}

#[test]
fn test_roundtrip_serialization() {
    // Build a For task programmatically, serialize it to JSON, parse it
    // back, and confirm it is still recognized as a For task rather than
    // being demoted to a Do task.
    let mut subtasks = Map::new();
    subtasks.add(
        "task1".to_string(),
        TaskDefinition::Call(CallTaskDefinition::new("someFunction", None, None)),
    );
    let task_def = TaskDefinition::For(ForTaskDefinition::new(
        ForLoopDefinition::new("item", ".collection", None, None),
        subtasks,
        None,
    ));

    // Serialize to JSON
    let json_str = serde_json::to_string(&task_def).expect("Failed to serialize");
    println!("Serialized: {}", json_str);

    // Deserialize back
    let deserialized: TaskDefinition =
        serde_json::from_str(&json_str).expect("Failed to deserialize");

    // The variant must survive the roundtrip.
    match deserialized {
        TaskDefinition::For(for_def) => {
            assert_eq!(for_def.for_.each, "item");
            assert_eq!(for_def.for_.in_, ".collection");
        }
        TaskDefinition::Do(_) => {
            panic!("After roundtrip serialization, For task became a Do task")
        }
        other => panic!("Unexpected variant after roundtrip: {:?}", other),
    }
}
}
91 changes: 89 additions & 2 deletions core/src/models/task.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ impl ProcessType {
}

/// Represents a value that can be any of the supported task definitions
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[derive(Debug, Clone, PartialEq, Serialize)]
#[serde(untagged)]
pub enum TaskDefinition{
/// Variant holding the definition of a 'call' task
Expand Down Expand Up @@ -84,6 +84,93 @@ pub enum TaskDefinition{
Wait(WaitTaskDefinition)
}

// Custom deserializer to handle For vs Do ambiguity
impl<'de> serde::Deserialize<'de> for TaskDefinition {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let value = Value::deserialize(deserializer)?;

// Check for 'for' field first - if present, it's a For task
if value.get("for").is_some() {
return ForTaskDefinition::deserialize(value)
.map(TaskDefinition::For)
.map_err(serde::de::Error::custom);
}

// Try other variants in priority order
if value.get("call").is_some() {
return CallTaskDefinition::deserialize(value)
.map(TaskDefinition::Call)
.map_err(serde::de::Error::custom);
}

if value.get("set").is_some() {
return SetTaskDefinition::deserialize(value)
.map(TaskDefinition::Set)
.map_err(serde::de::Error::custom);
}

if value.get("fork").is_some() {
return ForkTaskDefinition::deserialize(value)
.map(TaskDefinition::Fork)
.map_err(serde::de::Error::custom);
}

if value.get("run").is_some() {
return RunTaskDefinition::deserialize(value)
.map(TaskDefinition::Run)
.map_err(serde::de::Error::custom);
}

if value.get("switch").is_some() {
return SwitchTaskDefinition::deserialize(value)
.map(TaskDefinition::Switch)
.map_err(serde::de::Error::custom);
}

if value.get("try").is_some() {
return TryTaskDefinition::deserialize(value)
.map(TaskDefinition::Try)
.map_err(serde::de::Error::custom);
}

if value.get("emit").is_some() {
return EmitTaskDefinition::deserialize(value)
.map(TaskDefinition::Emit)
.map_err(serde::de::Error::custom);
}

if value.get("raise").is_some() {
return RaiseTaskDefinition::deserialize(value)
.map(TaskDefinition::Raise)
.map_err(serde::de::Error::custom);
}

if value.get("wait").is_some() {
return WaitTaskDefinition::deserialize(value)
.map(TaskDefinition::Wait)
.map_err(serde::de::Error::custom);
}

if value.get("listen").is_some() {
return ListenTaskDefinition::deserialize(value)
.map(TaskDefinition::Listen)
.map_err(serde::de::Error::custom);
}

// If we get here and there's a 'do' field, it's a Do task (not a For task)
if value.get("do").is_some() {
return DoTaskDefinition::deserialize(value)
.map(TaskDefinition::Do)
.map_err(serde::de::Error::custom);
}

Err(serde::de::Error::custom("unknown task type"))
}
}

/// A trait that all task definitions must implement
pub trait TaskDefinitionBase {
/// Gets the task's type
Expand Down Expand Up @@ -305,7 +392,7 @@ impl ForTaskDefinition {
pub struct ForLoopDefinition{

/// Gets/sets the name of the variable that represents each element in the collection during iteration
#[serde(rename = "emit")]
#[serde(rename = "each")]
pub each: String,

/// Gets/sets the runtime expression used to get the collection to iterate over
Expand Down
Loading