From 3bbda19814ae5abcd8bbee8d7af13dad370e10e9 Mon Sep 17 00:00:00 2001 From: JaGeo Date: Thu, 23 Oct 2025 14:59:43 +0200 Subject: [PATCH 01/11] add the nested examples --- example_workflows/nested/jobflow.ipynb | 35 +++++++++++++++++++ example_workflows/nested/main.pwd.json | 14 ++++++++ example_workflows/nested/prod_div.json | 19 ++++++++++ .../{arithmetic => nested}/workflow.py | 0 4 files changed, 68 insertions(+) create mode 100644 example_workflows/nested/jobflow.ipynb create mode 100644 example_workflows/nested/main.pwd.json create mode 100644 example_workflows/nested/prod_div.json rename example_workflows/{arithmetic => nested}/workflow.py (100%) diff --git a/example_workflows/nested/jobflow.ipynb b/example_workflows/nested/jobflow.ipynb new file mode 100644 index 0000000..d4672cd --- /dev/null +++ b/example_workflows/nested/jobflow.ipynb @@ -0,0 +1,35 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "initial_id", + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": "from python_workflow_definition.aiida import load_workflow_json" + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/example_workflows/nested/main.pwd.json b/example_workflows/nested/main.pwd.json new file mode 100644 index 0000000..64737fb --- /dev/null +++ b/example_workflows/nested/main.pwd.json @@ -0,0 +1,14 @@ +{ + "version": "0.1.1", + "nodes": [ + { "id": 0, "type": "workflow", "value": "prod_div.json" }, + { "id": 1, "value": 1, "type": "input", "name": "a" }, + { "id": 2, "value": 2, "type": "input", "name": "b" }, + { "id": 3, "type": "output", "name": "final_result" } + ], + "edges": [ + { "target": 0, "targetPort": "x", "source": 1, "sourcePort": null }, + { "target": 0, "targetPort": "y", "source": 2, "sourcePort": null }, + { "target": 3, "targetPort": "null", "source": 0, "sourcePort": "result" } + ] +} \ No newline at end of file diff --git a/example_workflows/nested/prod_div.json b/example_workflows/nested/prod_div.json new file mode 100644 index 0000000..60c4221 --- /dev/null +++ b/example_workflows/nested/prod_div.json @@ -0,0 +1,19 @@ +{ + "version": "0.1.1", + "nodes": [ + { "id": 0, "type": "function", "value": "workflow.get_prod_and_div" }, + { "id": 1, "type": "function", "value": "workflow.get_sum" }, + { "id": 2, "type": "function", "value": "workflow.get_square" }, + { "id": 3, "type": "input", "value": 1, "name": "x" }, + { "id": 4, "type": "input", "value": 2, "name": "y" }, + { "id": 5, "type": "output", "name": "result" } + ], + "edges": [ + { "target": 0, "targetPort": "x", "source": 3, "sourcePort": null }, + { "target": 0, "targetPort": "y", "source": 4, "sourcePort": null }, + { "target": 1, "targetPort": "x", "source": 0, "sourcePort": "prod" }, + { "target": 1, "targetPort": "y", "source": 0, "sourcePort": "div" }, + { "target": 2, "targetPort": "x", "source": 1, "sourcePort": null }, + { "target": 5, "targetPort": null, "source": 2, "sourcePort": null } + ] +} \ No newline at end of file diff --git a/example_workflows/arithmetic/workflow.py b/example_workflows/nested/workflow.py similarity index 100% rename from example_workflows/arithmetic/workflow.py rename to example_workflows/nested/workflow.py From e4fa3ded46b5ed1bc69af280ad8f3acbe9dd7f32 Mon Sep 17 00:00:00 2001 From: JaGeo Date: Thu, 23 Oct 2025 15:08:52 +0200 Subject: [PATCH 02/11] add pydantic model for workflow --- example_workflows/nested/jobflow.ipynb | 57 ++++++++++++++++++++++-- src/python_workflow_definition/models.py | 21 +++++++++ 2 files changed, 75 insertions(+), 3 deletions(-) diff --git a/example_workflows/nested/jobflow.ipynb b/example_workflows/nested/jobflow.ipynb index d4672cd..dbf60c1 100644 --- a/example_workflows/nested/jobflow.ipynb +++ b/example_workflows/nested/jobflow.ipynb @@ -2,13 +2,64 @@ "cells": [ { "cell_type": "code", - "execution_count": null, "id": "initial_id", "metadata": { - "collapsed": true + "collapsed": true, + "ExecuteTime": { + "end_time": "2025-10-23T13:08:02.617292Z", + "start_time": "2025-10-23T13:08:02.205177Z" + } + }, + "source": "from python_workflow_definition.jobflow import load_workflow_json", + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/jgeorge/miniconda3/envs/2025_PWD_Extension_nested_flows/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "execution_count": 1 + }, + { + "metadata": { + "ExecuteTime": { + "end_time": "2025-10-23T13:08:03.884311Z", + "start_time": "2025-10-23T13:08:03.635773Z" + } }, + "cell_type": "code", + "source": "load_workflow_json(\"main.pwd.json\")", + "id": "7e1707c47e14fbcc", + "outputs": [ + { + "ename": "ModuleNotFoundError", + "evalue": "No module named 'prod_div'", + "output_type": "error", + "traceback": [ + "\u001B[31m---------------------------------------------------------------------------\u001B[39m", + "\u001B[31mModuleNotFoundError\u001B[39m Traceback (most recent call last)", + "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[2]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m \u001B[43mload_workflow_json\u001B[49m\u001B[43m(\u001B[49m\u001B[33;43m\"\u001B[39;49m\u001B[33;43mmain.pwd.json\u001B[39;49m\u001B[33;43m\"\u001B[39;49m\u001B[43m)\u001B[49m\n", + "\u001B[36mFile \u001B[39m\u001B[32m/smb/jgeorge/hpc-user/PycharmProjects/2025_PWD_Extension_nested_flows/python-workflow-definition/src/python_workflow_definition/jobflow.py:301\u001B[39m, in \u001B[36mload_workflow_json\u001B[39m\u001B[34m(file_name)\u001B[39m\n\u001B[32m 299\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(v, \u001B[38;5;28mstr\u001B[39m) \u001B[38;5;129;01mand\u001B[39;00m \u001B[33m\"\u001B[39m\u001B[33m.\u001B[39m\u001B[33m\"\u001B[39m \u001B[38;5;129;01min\u001B[39;00m v:\n\u001B[32m 300\u001B[39m p, m = v.rsplit(\u001B[33m\"\u001B[39m\u001B[33m.\u001B[39m\u001B[33m\"\u001B[39m, \u001B[32m1\u001B[39m)\n\u001B[32m--> \u001B[39m\u001B[32m301\u001B[39m mod = \u001B[43mimport_module\u001B[49m\u001B[43m(\u001B[49m\u001B[43mp\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 302\u001B[39m nodes_new_dict[\u001B[38;5;28mint\u001B[39m(k)] = \u001B[38;5;28mgetattr\u001B[39m(mod, m)\n\u001B[32m 303\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n", + "\u001B[36mFile \u001B[39m\u001B[32m~/miniconda3/envs/2025_PWD_Extension_nested_flows/lib/python3.11/importlib/__init__.py:126\u001B[39m, in \u001B[36mimport_module\u001B[39m\u001B[34m(name, package)\u001B[39m\n\u001B[32m 124\u001B[39m \u001B[38;5;28;01mbreak\u001B[39;00m\n\u001B[32m 125\u001B[39m level += \u001B[32m1\u001B[39m\n\u001B[32m--> \u001B[39m\u001B[32m126\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43m_bootstrap\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_gcd_import\u001B[49m\u001B[43m(\u001B[49m\u001B[43mname\u001B[49m\u001B[43m[\u001B[49m\u001B[43mlevel\u001B[49m\u001B[43m:\u001B[49m\u001B[43m]\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mpackage\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mlevel\u001B[49m\u001B[43m)\u001B[49m\n", + "\u001B[36mFile \u001B[39m\u001B[32m:1204\u001B[39m, in \u001B[36m_gcd_import\u001B[39m\u001B[34m(name, package, level)\u001B[39m\n", + "\u001B[36mFile \u001B[39m\u001B[32m:1176\u001B[39m, in \u001B[36m_find_and_load\u001B[39m\u001B[34m(name, import_)\u001B[39m\n", + "\u001B[36mFile \u001B[39m\u001B[32m:1140\u001B[39m, in \u001B[36m_find_and_load_unlocked\u001B[39m\u001B[34m(name, import_)\u001B[39m\n", + "\u001B[31mModuleNotFoundError\u001B[39m: No module named 'prod_div'" + ] + } + ], + "execution_count": 2 + }, + { + "metadata": {}, + "cell_type": "code", "outputs": [], - "source": "from python_workflow_definition.aiida import load_workflow_json" + "execution_count": null, + "source": "", + "id": "ae57f67080d3a26c" } ], "metadata": { diff --git a/src/python_workflow_definition/models.py b/src/python_workflow_definition/models.py index 4980cfa..78afd16 100644 --- a/src/python_workflow_definition/models.py +++ b/src/python_workflow_definition/models.py @@ -63,6 +63,26 @@ def check_value_format(cls, v: str): raise ValueError(msg) return v +class PythonWorkflowDefinitionWorklowNode(PythonWorkflowDefinitionBaseNode): + """ + Model for function execution nodes. + The 'name' attribute is computed automatically from 'value'. + """ + + type: Literal["workflow"] + value: str # Expected format: 'module.function' + + @field_validator("value") + @classmethod + def check_value_format(cls, v: str): + if not v or "." not in v or v.startswith(".") or v.endswith("."): + msg = ( + "FunctionNode 'value' must be a non-empty string ", + "in 'module.function' format with at least one period.", + ) + raise ValueError(msg) + return v + # Discriminated Union for Nodes PythonWorkflowDefinitionNode = Annotated[ @@ -70,6 +90,7 @@ def check_value_format(cls, v: str): PythonWorkflowDefinitionInputNode, PythonWorkflowDefinitionOutputNode, PythonWorkflowDefinitionFunctionNode, + PythonWorkflowDefinitionWorklowNode ], Field(discriminator="type"), ] From 630d69a6568d25531f577320ed252459af49567f Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Thu, 23 Oct 2025 19:14:29 +0200 Subject: [PATCH 03/11] first (apparently) working version --- .gitignore | 1 + example_workflows/nested/jobflow.ipynb | 86 ------------ example_workflows/nested/load_aiida.py | 146 +++++++++++++++++++++ src/python_workflow_definition/aiida.py | 165 ++++++++++++++++++++++++ 4 files changed, 312 insertions(+), 86 deletions(-) delete mode 100644 example_workflows/nested/jobflow.ipynb create mode 100644 example_workflows/nested/load_aiida.py diff --git a/.gitignore b/.gitignore index e464d07..93c5705 100644 --- a/.gitignore +++ b/.gitignore @@ -200,3 +200,4 @@ aiida_to_jobflow_qe.json pyiron_base_to_aiida_simple.json pyiron_base_to_jobflow_qe.json **/*.h5 +**/html/ diff --git a/example_workflows/nested/jobflow.ipynb b/example_workflows/nested/jobflow.ipynb deleted file mode 100644 index dbf60c1..0000000 --- a/example_workflows/nested/jobflow.ipynb +++ /dev/null @@ -1,86 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "id": "initial_id", - "metadata": { - "collapsed": true, - "ExecuteTime": { - "end_time": "2025-10-23T13:08:02.617292Z", - "start_time": "2025-10-23T13:08:02.205177Z" - } - }, - "source": "from python_workflow_definition.jobflow import load_workflow_json", - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/jgeorge/miniconda3/envs/2025_PWD_Extension_nested_flows/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], - "execution_count": 1 - }, - { - "metadata": { - "ExecuteTime": { - "end_time": "2025-10-23T13:08:03.884311Z", - "start_time": "2025-10-23T13:08:03.635773Z" - } - }, - "cell_type": "code", - "source": "load_workflow_json(\"main.pwd.json\")", - "id": "7e1707c47e14fbcc", - "outputs": [ - { - "ename": "ModuleNotFoundError", - "evalue": "No module named 'prod_div'", - "output_type": "error", - "traceback": [ - "\u001B[31m---------------------------------------------------------------------------\u001B[39m", - "\u001B[31mModuleNotFoundError\u001B[39m Traceback (most recent call last)", - "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[2]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m \u001B[43mload_workflow_json\u001B[49m\u001B[43m(\u001B[49m\u001B[33;43m\"\u001B[39;49m\u001B[33;43mmain.pwd.json\u001B[39;49m\u001B[33;43m\"\u001B[39;49m\u001B[43m)\u001B[49m\n", - "\u001B[36mFile \u001B[39m\u001B[32m/smb/jgeorge/hpc-user/PycharmProjects/2025_PWD_Extension_nested_flows/python-workflow-definition/src/python_workflow_definition/jobflow.py:301\u001B[39m, in \u001B[36mload_workflow_json\u001B[39m\u001B[34m(file_name)\u001B[39m\n\u001B[32m 299\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(v, \u001B[38;5;28mstr\u001B[39m) \u001B[38;5;129;01mand\u001B[39;00m \u001B[33m\"\u001B[39m\u001B[33m.\u001B[39m\u001B[33m\"\u001B[39m \u001B[38;5;129;01min\u001B[39;00m v:\n\u001B[32m 300\u001B[39m p, m = v.rsplit(\u001B[33m\"\u001B[39m\u001B[33m.\u001B[39m\u001B[33m\"\u001B[39m, \u001B[32m1\u001B[39m)\n\u001B[32m--> \u001B[39m\u001B[32m301\u001B[39m mod = \u001B[43mimport_module\u001B[49m\u001B[43m(\u001B[49m\u001B[43mp\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 302\u001B[39m nodes_new_dict[\u001B[38;5;28mint\u001B[39m(k)] = \u001B[38;5;28mgetattr\u001B[39m(mod, m)\n\u001B[32m 303\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n", - "\u001B[36mFile \u001B[39m\u001B[32m~/miniconda3/envs/2025_PWD_Extension_nested_flows/lib/python3.11/importlib/__init__.py:126\u001B[39m, in \u001B[36mimport_module\u001B[39m\u001B[34m(name, package)\u001B[39m\n\u001B[32m 124\u001B[39m \u001B[38;5;28;01mbreak\u001B[39;00m\n\u001B[32m 125\u001B[39m level += \u001B[32m1\u001B[39m\n\u001B[32m--> \u001B[39m\u001B[32m126\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43m_bootstrap\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_gcd_import\u001B[49m\u001B[43m(\u001B[49m\u001B[43mname\u001B[49m\u001B[43m[\u001B[49m\u001B[43mlevel\u001B[49m\u001B[43m:\u001B[49m\u001B[43m]\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mpackage\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mlevel\u001B[49m\u001B[43m)\u001B[49m\n", - "\u001B[36mFile \u001B[39m\u001B[32m:1204\u001B[39m, in \u001B[36m_gcd_import\u001B[39m\u001B[34m(name, package, level)\u001B[39m\n", - "\u001B[36mFile \u001B[39m\u001B[32m:1176\u001B[39m, in \u001B[36m_find_and_load\u001B[39m\u001B[34m(name, import_)\u001B[39m\n", - "\u001B[36mFile \u001B[39m\u001B[32m:1140\u001B[39m, in \u001B[36m_find_and_load_unlocked\u001B[39m\u001B[34m(name, import_)\u001B[39m\n", - "\u001B[31mModuleNotFoundError\u001B[39m: No module named 'prod_div'" - ] - } - ], - "execution_count": 2 - }, - { - "metadata": {}, - "cell_type": "code", - "outputs": [], - "execution_count": null, - "source": "", - "id": "ae57f67080d3a26c" - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 2 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/example_workflows/nested/load_aiida.py b/example_workflows/nested/load_aiida.py new file mode 100644 index 0000000..11845d4 --- /dev/null +++ b/example_workflows/nested/load_aiida.py @@ -0,0 +1,146 @@ +from python_workflow_definition.aiida import write_workflow_json, load_workflow_json, load_workflow_json_nested + +from aiida_workgraph import WorkGraph, task +from aiida import orm, load_profile + +load_profile() + +workflow_json_filename = "main.pwd.json" + + +# In[2]: + + +from workflow import ( + get_sum as _get_sum, + get_prod_and_div as _get_prod_and_div, + get_square as _get_square, +) + + +# wg = WorkGraph("nested") + + +# In[4]: + +wg = load_workflow_json_nested(workflow_json_filename) +wg.to_html() + +breakpoint() + +pass + +wg.run() + +# +# +# get_prod_and_div_task = wg.add_task( +# task(outputs=["prod", "div"])(_get_prod_and_div), +# x=orm.Float(1), +# y=orm.Float(2), +# ) +# +# +# # In[5]: +# +# +# get_sum_task = wg.add_task( +# _get_sum, +# x=get_prod_and_div_task.outputs.prod, +# y=get_prod_and_div_task.outputs.div, +# ) +# +# +# # In[6]: +# +# +# get_square_task = wg.add_task( +# _get_square, +# x=get_sum_task.outputs.result, +# ) +# +# +# # In[7]: +# +# +# write_workflow_json(wg=wg, file_name=workflow_json_filename) +# +# +# # In[8]: +# +# +# get_ipython().system("cat {workflow_json_filename}") +# +# +# # ## Load Workflow with jobflow +# +# # In[9]: +# +# +# from python_workflow_definition.jobflow import load_workflow_json +# +# +# # In[10]: +# +# +# from jobflow.managers.local import run_locally +# +# +# # In[11]: +# +# +# flow = load_workflow_json(file_name=workflow_json_filename) +# +# +# # In[12]: +# +# +# result = run_locally(flow) +# result +# +# +# # ## Load Workflow with pyiron_base +# +# # In[13]: +# +# +# from python_workflow_definition.pyiron_base import load_workflow_json +# +# +# # In[14]: +# +# +# delayed_object_lst = load_workflow_json(file_name=workflow_json_filename) +# delayed_object_lst[-1].draw() +# +# +# # In[15]: +# +# +# delayed_object_lst[-1].pull() +# +# +# # ## Load Workflow with pyiron_workflow +# +# # In[ ]: +# +# +# from python_workflow_definition.pyiron_workflow import load_workflow_json +# +# +# # In[ ]: +# +# +# wf = load_workflow_json(file_name=workflow_json_filename) +# +# +# # In[ ]: +# +# +# wf.draw(size=(10, 10)) +# +# +# # In[ ]: +# +# +# wf.run() diff --git a/src/python_workflow_definition/aiida.py b/src/python_workflow_definition/aiida.py index 16366ee..598100b 100644 --- a/src/python_workflow_definition/aiida.py +++ b/src/python_workflow_definition/aiida.py @@ -1,5 +1,7 @@ from importlib import import_module import traceback +from pathlib import Path +from typing import Any from aiida import orm from aiida_pythonjob.data.serializer import general_serializer @@ -23,6 +25,169 @@ ) +def load_workflow_json_nested(file_name: str) -> WorkGraph: + """Load a workflow from JSON with support for nested workflows. + + This function recursively loads workflows, properly exposing inputs/outputs + of nested workflows so they can be connected in the parent workflow. + """ + data = PythonWorkflowDefinitionWorkflow.load_json_file(file_name=file_name) + parent_dir = Path(file_name).parent + + # Check if this workflow has workflow-type nodes (nested workflows) + has_nested = any(n["type"] == "workflow" for n in data[NODES_LABEL]) + + # Extract input/output nodes for this workflow + input_nodes = [n for n in data[NODES_LABEL] if n["type"] == "input"] + output_nodes = [n for n in data[NODES_LABEL] if n["type"] == "output"] + + # Create WorkGraph with proper inputs/outputs if this will be used as a sub-workflow + if has_nested or input_nodes or output_nodes: + # Build namespace for inputs + inputs_ns = {} + for inp_node in input_nodes: + inputs_ns[inp_node["name"]] = namespace + + # Build namespace for outputs + outputs_ns = {} + for out_node in output_nodes: + outputs_ns[out_node["name"]] = namespace + + wg = WorkGraph( + inputs=namespace(**inputs_ns) if inputs_ns else None, + outputs=namespace(**outputs_ns) if outputs_ns else None + ) + else: + wg = WorkGraph() + + task_name_mapping = {} + input_node_mapping = {} # Map input node IDs to their names + + # Process nodes + for node in data[NODES_LABEL]: + node_id = str(node["id"]) + node_type = node["type"] + + if node_type == "function": + # Handle function nodes + func_path = node["value"] + p, m = func_path.rsplit(".", 1) + mod = import_module(p) + func = getattr(mod, m) + decorated_func = task(outputs=namespace())(func) + new_task = wg.add_task(decorated_func) + new_task.spec = replace(new_task.spec, schema_source=SchemaSource.EMBEDDED) + task_name_mapping[node_id] = new_task + + elif node_type == "workflow": + # Handle nested workflow nodes + workflow_file = node["value"] + # Resolve path relative to parent workflow file + workflow_path = parent_dir / workflow_file + + # Recursively load the sub-workflow with proper input/output exposure + sub_wg = load_workflow_json_nested(file_name=str(workflow_path)) + + # Add the sub-workflow as a task - it will automatically have the right inputs/outputs + workflow_task = wg.add_task(sub_wg) + task_name_mapping[node_id] = workflow_task + + elif node_type == "input": + # Store input node info for later connection to wg.inputs + input_node_mapping[node_id] = node["name"] + # Also create a data node for direct value setting if needed + if "value" in node and node["value"] is not None: + value = node["value"] + data_node = general_serializer(value) + task_name_mapping[node_id] = data_node + + elif node_type == "output": + # Output nodes will be handled when setting wg.outputs + pass + + # Add links + for link in data[EDGES_LABEL]: + source_id = str(link[SOURCE_LABEL]) + target_id = str(link[TARGET_LABEL]) + source_port = link[SOURCE_PORT_LABEL] + target_port = link[TARGET_PORT_LABEL] + + # Handle output node connections + target_node = next((n for n in data[NODES_LABEL] if str(n["id"]) == target_id), None) + if target_node and target_node["type"] == "output": + # This connects a task output to a workflow output + from_task = task_name_mapping.get(source_id) + if from_task and isinstance(from_task, Task): + if source_port is None: + source_port = "result" + if source_port not in from_task.outputs: + from_socket = from_task.add_output_spec("workgraph.any", name=source_port) + else: + from_socket = from_task.outputs[source_port] + + # Set the workflow output + output_name = target_node["name"] + if hasattr(wg.outputs, output_name): + setattr(wg.outputs, output_name, from_socket) + continue + + # Handle input node connections + source_node = next((n for n in data[NODES_LABEL] if str(n["id"]) == source_id), None) + if source_node and source_node["type"] == "input": + to_task = task_name_mapping.get(target_id) + if to_task and isinstance(to_task, Task): + # Add target socket if it doesn't exist + if target_port not in to_task.inputs: + to_socket = to_task.add_input_spec("workgraph.any", name=target_port) + else: + to_socket = to_task.inputs[target_port] + + # Connect from workflow input or from data node + if hasattr(wg.inputs, source_node["name"]): + # Connect from workflow input + from_socket = getattr(wg.inputs, source_node["name"]) + wg.add_link(from_socket, to_socket) + elif source_id in task_name_mapping: + # Connect from data node (has a value) + data_node = task_name_mapping[source_id] + if isinstance(data_node, orm.Data): + to_socket.value = data_node + continue + + # Handle regular task-to-task connections + to_task = task_name_mapping.get(target_id) + from_task = task_name_mapping.get(source_id) + + if to_task is None or from_task is None: + continue + + if isinstance(to_task, Task): + # Add target socket if needed + if target_port not in to_task.inputs: + to_socket = to_task.add_input_spec("workgraph.any", name=target_port) + else: + to_socket = to_task.inputs[target_port] + + if isinstance(from_task, orm.Data): + to_socket.value = from_task + elif isinstance(from_task, Task): + try: + if source_port is None: + source_port = "result" + + # Add source socket if needed + if source_port not in from_task.outputs: + from_socket = from_task.add_output_spec("workgraph.any", name=source_port) + else: + from_socket = from_task.outputs[source_port] + + wg.add_link(from_socket, to_socket) + except Exception as e: + traceback.print_exc() + print("Failed to link", link, "with error:", e) + + return wg + def load_workflow_json(file_name: str) -> WorkGraph: data = PythonWorkflowDefinitionWorkflow.load_json_file(file_name=file_name) From eebb50e22c593400a0a95f6531ddca8627d55d6c Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Fri, 24 Oct 2025 07:39:41 +0200 Subject: [PATCH 04/11] properly pass through default values to inner workgraph --- src/python_workflow_definition/aiida.py | 29 +++++++++++++++++++------ 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/src/python_workflow_definition/aiida.py b/src/python_workflow_definition/aiida.py index 598100b..53489fb 100644 --- a/src/python_workflow_definition/aiida.py +++ b/src/python_workflow_definition/aiida.py @@ -55,7 +55,7 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: wg = WorkGraph( inputs=namespace(**inputs_ns) if inputs_ns else None, - outputs=namespace(**outputs_ns) if outputs_ns else None + outputs=namespace(**outputs_ns) if outputs_ns else None, ) else: wg = WorkGraph() @@ -95,10 +95,14 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: elif node_type == "input": # Store input node info for later connection to wg.inputs input_node_mapping[node_id] = node["name"] - # Also create a data node for direct value setting if needed + # Set default value on the workflow's exposed input if provided if "value" in node and node["value"] is not None: value = node["value"] data_node = general_serializer(value) + # Set the default on the workflow's exposed input + if hasattr(wg.inputs, node["name"]): + setattr(wg.inputs, node["name"], data_node) + # Also store in mapping for direct connections in non-nested contexts task_name_mapping[node_id] = data_node elif node_type == "output": @@ -113,7 +117,9 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: target_port = link[TARGET_PORT_LABEL] # Handle output node connections - target_node = next((n for n in data[NODES_LABEL] if str(n["id"]) == target_id), None) + target_node = next( + (n for n in data[NODES_LABEL] if str(n["id"]) == target_id), None + ) if target_node and target_node["type"] == "output": # This connects a task output to a workflow output from_task = task_name_mapping.get(source_id) @@ -121,7 +127,9 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: if source_port is None: source_port = "result" if source_port not in from_task.outputs: - from_socket = from_task.add_output_spec("workgraph.any", name=source_port) + from_socket = from_task.add_output_spec( + "workgraph.any", name=source_port + ) else: from_socket = from_task.outputs[source_port] @@ -132,13 +140,17 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: continue # Handle input node connections - source_node = next((n for n in data[NODES_LABEL] if str(n["id"]) == source_id), None) + source_node = next( + (n for n in data[NODES_LABEL] if str(n["id"]) == source_id), None + ) if source_node and source_node["type"] == "input": to_task = task_name_mapping.get(target_id) if to_task and isinstance(to_task, Task): # Add target socket if it doesn't exist if target_port not in to_task.inputs: - to_socket = to_task.add_input_spec("workgraph.any", name=target_port) + to_socket = to_task.add_input_spec( + "workgraph.any", name=target_port + ) else: to_socket = to_task.inputs[target_port] @@ -177,7 +189,9 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: # Add source socket if needed if source_port not in from_task.outputs: - from_socket = from_task.add_output_spec("workgraph.any", name=source_port) + from_socket = from_task.add_output_spec( + "workgraph.any", name=source_port + ) else: from_socket = from_task.outputs[source_port] @@ -188,6 +202,7 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: return wg + def load_workflow_json(file_name: str) -> WorkGraph: data = PythonWorkflowDefinitionWorkflow.load_json_file(file_name=file_name) From 6128263a5668f7ccb1726d19600236de01ed0609 Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Fri, 24 Oct 2025 07:44:46 +0200 Subject: [PATCH 05/11] replace nested normal with nested. see if ci passes --- example_workflows/nested/load_aiida.py | 140 +----------------------- src/python_workflow_definition/aiida.py | 64 +---------- 2 files changed, 5 insertions(+), 199 deletions(-) diff --git a/example_workflows/nested/load_aiida.py b/example_workflows/nested/load_aiida.py index 11845d4..7cc7689 100644 --- a/example_workflows/nested/load_aiida.py +++ b/example_workflows/nested/load_aiida.py @@ -1,146 +1,12 @@ -from python_workflow_definition.aiida import write_workflow_json, load_workflow_json, load_workflow_json_nested +from python_workflow_definition.aiida import load_workflow_json -from aiida_workgraph import WorkGraph, task -from aiida import orm, load_profile +from aiida import load_profile load_profile() workflow_json_filename = "main.pwd.json" +wg = load_workflow_json(workflow_json_filename) -# In[2]: - - -from workflow import ( - get_sum as _get_sum, - get_prod_and_div as _get_prod_and_div, - get_square as _get_square, -) - - -# wg = WorkGraph("nested") - - -# In[4]: - -wg = load_workflow_json_nested(workflow_json_filename) wg.to_html() - -breakpoint() - -pass - wg.run() - -# -# -# get_prod_and_div_task = wg.add_task( -# task(outputs=["prod", "div"])(_get_prod_and_div), -# x=orm.Float(1), -# y=orm.Float(2), -# ) -# -# -# # In[5]: -# -# -# get_sum_task = wg.add_task( -# _get_sum, -# x=get_prod_and_div_task.outputs.prod, -# y=get_prod_and_div_task.outputs.div, -# ) -# -# -# # In[6]: -# -# -# get_square_task = wg.add_task( -# _get_square, -# x=get_sum_task.outputs.result, -# ) -# -# -# # In[7]: -# -# -# write_workflow_json(wg=wg, file_name=workflow_json_filename) -# -# -# # In[8]: -# -# -# get_ipython().system("cat {workflow_json_filename}") -# -# -# # ## Load Workflow with jobflow -# -# # In[9]: -# -# -# from python_workflow_definition.jobflow import load_workflow_json -# -# -# # In[10]: -# -# -# from jobflow.managers.local import run_locally -# -# -# # In[11]: -# -# -# flow = load_workflow_json(file_name=workflow_json_filename) -# -# -# # In[12]: -# -# -# result = run_locally(flow) -# result -# -# -# # ## Load Workflow with pyiron_base -# -# # In[13]: -# -# -# from python_workflow_definition.pyiron_base import load_workflow_json -# -# -# # In[14]: -# -# -# delayed_object_lst = load_workflow_json(file_name=workflow_json_filename) -# delayed_object_lst[-1].draw() -# -# -# # In[15]: -# -# -# delayed_object_lst[-1].pull() -# -# -# # ## Load Workflow with pyiron_workflow -# -# # In[ ]: -# -# -# from python_workflow_definition.pyiron_workflow import load_workflow_json -# -# -# # In[ ]: -# -# -# wf = load_workflow_json(file_name=workflow_json_filename) -# -# -# # In[ ]: -# -# -# wf.draw(size=(10, 10)) -# -# -# # In[ ]: -# -# -# wf.run() diff --git a/src/python_workflow_definition/aiida.py b/src/python_workflow_definition/aiida.py index 53489fb..c8631be 100644 --- a/src/python_workflow_definition/aiida.py +++ b/src/python_workflow_definition/aiida.py @@ -25,7 +25,7 @@ ) -def load_workflow_json_nested(file_name: str) -> WorkGraph: +def load_workflow_json(file_name: str) -> WorkGraph: """Load a workflow from JSON with support for nested workflows. This function recursively loads workflows, properly exposing inputs/outputs @@ -86,7 +86,7 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: workflow_path = parent_dir / workflow_file # Recursively load the sub-workflow with proper input/output exposure - sub_wg = load_workflow_json_nested(file_name=str(workflow_path)) + sub_wg = load_workflow_json(file_name=str(workflow_path)) # Add the sub-workflow as a task - it will automatically have the right inputs/outputs workflow_task = wg.add_task(sub_wg) @@ -203,66 +203,6 @@ def load_workflow_json_nested(file_name: str) -> WorkGraph: return wg -def load_workflow_json(file_name: str) -> WorkGraph: - - data = PythonWorkflowDefinitionWorkflow.load_json_file(file_name=file_name) - - wg = WorkGraph() - task_name_mapping = {} - - for id, identifier in convert_nodes_list_to_dict( - nodes_list=data[NODES_LABEL] - ).items(): - if isinstance(identifier, str) and "." in identifier: - p, m = identifier.rsplit(".", 1) - mod = import_module(p) - func = getattr(mod, m) - decorated_func = task(outputs=namespace())(func) - new_task = wg.add_task(decorated_func) - new_task.spec = replace(new_task.spec, schema_source=SchemaSource.EMBEDDED) - task_name_mapping[id] = new_task - else: - # data task - data_node = general_serializer(identifier) - task_name_mapping[id] = data_node - - # add links - for link in data[EDGES_LABEL]: - # TODO: continue here - to_task = task_name_mapping[str(link[TARGET_LABEL])] - # if the input is not exit, it means we pass the data into to the kwargs - # in this case, we add the input socket - if isinstance(to_task, Task): - if link[TARGET_PORT_LABEL] not in to_task.inputs: - to_socket = to_task.add_input_spec( - "workgraph.any", name=link[TARGET_PORT_LABEL] - ) - else: - to_socket = to_task.inputs[link[TARGET_PORT_LABEL]] - from_task = task_name_mapping[str(link[SOURCE_LABEL])] - if isinstance(from_task, orm.Data): - to_socket.value = from_task - else: - try: - if link[SOURCE_PORT_LABEL] is None: - link[SOURCE_PORT_LABEL] = "result" - # because we are not define the outputs explicitly during the pythonjob creation - # we add it here, and assume the output exit - if link[SOURCE_PORT_LABEL] not in from_task.outputs: - # if str(link["sourcePort"]) not in from_task.outputs: - from_socket = from_task.add_output_spec( - "workgraph.any", - name=link[SOURCE_PORT_LABEL], - ) - else: - from_socket = from_task.outputs[link[SOURCE_PORT_LABEL]] - if isinstance(to_task, Task): - wg.add_link(from_socket, to_socket) - except Exception as e: - traceback.print_exc() - print("Failed to link", link, "with error:", e) - return wg - def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: data = {NODES_LABEL: [], EDGES_LABEL: []} From 08f3ab01dad0d004fddb5869a19abafde8dff489 Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Fri, 24 Oct 2025 07:48:55 +0200 Subject: [PATCH 06/11] restore arithmetic workflow.py --- example_workflows/arithmetic/workflow.py | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 example_workflows/arithmetic/workflow.py diff --git a/example_workflows/arithmetic/workflow.py b/example_workflows/arithmetic/workflow.py new file mode 100644 index 0000000..1a2e4c3 --- /dev/null +++ b/example_workflows/arithmetic/workflow.py @@ -0,0 +1,10 @@ +def get_prod_and_div(x, y): + return {"prod": x * y, "div": x / y} + + +def get_sum(x, y): + return x + y + + +def get_square(x): + return x ** 2 From 97b36ecdc82ed43c4d83a2d2ec2c72a330ccc4c8 Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Fri, 24 Oct 2025 08:13:46 +0200 Subject: [PATCH 07/11] additional pre- and post-processing in nested workflow --- example_workflows/nested/main.pwd.json | 23 +++++++++++++++-------- src/python_workflow_definition/aiida.py | 1 - 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/example_workflows/nested/main.pwd.json b/example_workflows/nested/main.pwd.json index 64737fb..df08ddb 100644 --- a/example_workflows/nested/main.pwd.json +++ b/example_workflows/nested/main.pwd.json @@ -1,14 +1,21 @@ { "version": "0.1.1", "nodes": [ - { "id": 0, "type": "workflow", "value": "prod_div.json" }, - { "id": 1, "value": 1, "type": "input", "name": "a" }, - { "id": 2, "value": 2, "type": "input", "name": "b" }, - { "id": 3, "type": "output", "name": "final_result" } + { "id": 0, "value": 3, "type": "input", "name": "a" }, + { "id": 1, "value": 2, "type": "input", "name": "b" }, + { "id": 2, "value": 4, "type": "input", "name": "c" }, + { "id": 3, "type": "function", "value": "workflow.get_prod_and_div" }, + { "id": 4, "type": "workflow", "value": "prod_div.json" }, + { "id": 5, "type": "function", "value": "workflow.get_sum" }, + { "id": 6, "type": "output", "name": "final_result" } ], "edges": [ - { "target": 0, "targetPort": "x", "source": 1, "sourcePort": null }, - { "target": 0, "targetPort": "y", "source": 2, "sourcePort": null }, - { "target": 3, "targetPort": "null", "source": 0, "sourcePort": "result" } + { "target": 3, "targetPort": "x", "source": 0, "sourcePort": null }, + { "target": 3, "targetPort": "y", "source": 2, "sourcePort": null }, + { "target": 4, "targetPort": "x", "source": 3, "sourcePort": "prod" }, + { "target": 4, "targetPort": "y", "source": 3, "sourcePort": "div" }, + { "target": 5, "targetPort": "x", "source": 4, "sourcePort": "result" }, + { "target": 5, "targetPort": "y", "source": 1, "sourcePort": null }, + { "target": 6, "targetPort": null, "source": 5, "sourcePort": null } ] -} \ No newline at end of file +} diff --git a/src/python_workflow_definition/aiida.py b/src/python_workflow_definition/aiida.py index c8631be..f4ebd7f 100644 --- a/src/python_workflow_definition/aiida.py +++ b/src/python_workflow_definition/aiida.py @@ -203,7 +203,6 @@ def load_workflow_json(file_name: str) -> WorkGraph: return wg - def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: data = {NODES_LABEL: [], EDGES_LABEL: []} node_name_mapping = {} From 424a068b568876ee526422d36a5b25dad00a6168 Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Fri, 24 Oct 2025 08:40:44 +0200 Subject: [PATCH 08/11] add code to generate nested aiida-workflow --- example_workflows/nested/write_aiida.py | 55 ++++++++++++++++++++++++ src/python_workflow_definition/models.py | 3 +- 2 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 example_workflows/nested/write_aiida.py diff --git a/example_workflows/nested/write_aiida.py b/example_workflows/nested/write_aiida.py new file mode 100644 index 0000000..523d8c8 --- /dev/null +++ b/example_workflows/nested/write_aiida.py @@ -0,0 +1,55 @@ +from aiida_workgraph import task +from aiida import load_profile + +load_profile() + + +@task(outputs=["prod", "div"]) +def get_prod_and_div(x, y): + return {"prod": x * y, "div": x / y} + + +@task +def get_sum(x, y): + return x + y + + +@task +def get_square(x): + return x**2 + + +@task.graph +def nested_workflow(x, y): + """Inner workflow from prod_div.json: + - get_prod_and_div(x, y) → prod, div + - get_sum(prod, div) → result + - get_square(result) → result + """ + prod_and_div = get_prod_and_div(x=x, y=y) + sum_result = get_sum(x=prod_and_div.prod, y=prod_and_div.div) + square_result = get_square(x=sum_result.result) + return square_result.result + + +@task.graph +def main_workflow(a, b, c): + """Outer workflow from main.pwd.json: + - Pre-processing: get_prod_and_div(a, c) → prod, div + - Nested workflow: nested_workflow(prod, div) → result + - Post-processing: get_sum(result, b) → final_result + """ + # Pre-processing step + preprocessing = get_prod_and_div(x=a, y=c) + + # Nested workflow + nested_result = nested_workflow(x=preprocessing.prod, y=preprocessing.div) + + # Post-processing step + final_result = get_sum(x=nested_result.result, y=b) + + return final_result.result + + +wg = main_workflow.build(a=3, b=2, c=4) +wg.run() diff --git a/src/python_workflow_definition/models.py b/src/python_workflow_definition/models.py index 78afd16..91d04c6 100644 --- a/src/python_workflow_definition/models.py +++ b/src/python_workflow_definition/models.py @@ -63,6 +63,7 @@ def check_value_format(cls, v: str): raise ValueError(msg) return v + class PythonWorkflowDefinitionWorklowNode(PythonWorkflowDefinitionBaseNode): """ Model for function execution nodes. @@ -90,7 +91,7 @@ def check_value_format(cls, v: str): PythonWorkflowDefinitionInputNode, PythonWorkflowDefinitionOutputNode, PythonWorkflowDefinitionFunctionNode, - PythonWorkflowDefinitionWorklowNode + PythonWorkflowDefinitionWorklowNode, ], Field(discriminator="type"), ] From d474622e35727b173b6a95056a137010ef59e51f Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Fri, 24 Oct 2025 11:32:37 +0200 Subject: [PATCH 09/11] round-trip working --- example_workflows/nested/round_trip.py | 149 ++++++++++++++++ example_workflows/nested/write_aiida.py | 104 +++++++---- src/python_workflow_definition/aiida.py | 217 +++++++++++++++++++++-- src/python_workflow_definition/shared.py | 2 +- 4 files changed, 419 insertions(+), 53 deletions(-) create mode 100644 example_workflows/nested/round_trip.py diff --git a/example_workflows/nested/round_trip.py b/example_workflows/nested/round_trip.py new file mode 100644 index 0000000..59c08ca --- /dev/null +++ b/example_workflows/nested/round_trip.py @@ -0,0 +1,149 @@ +""" +Round-trip test for nested workflows. + +This script demonstrates that: +1. Loading a nested workflow JSON preserves all structure and values +2. Exporting a loaded workflow produces identical JSON +3. Multiple round-trips are stable (load -> export -> load -> export produces identical results) +""" + +import json +from pathlib import Path +from python_workflow_definition.aiida import load_workflow_json, write_workflow_json +from aiida import load_profile + +# Load AiiDA profile +load_profile() + + +def compare_json_files(file1: str, file2: str) -> bool: + """Compare two JSON files for structural equality.""" + with open(file1) as f1, open(file2) as f2: + data1 = json.load(f1) + data2 = json.load(f2) + # Compare as sorted JSON strings to ignore ordering + return json.dumps(data1, sort_keys=True) == json.dumps(data2, sort_keys=True) + + +def print_workflow_info(wg, name: str): + """Print information about a loaded workflow.""" + print(f"\n{name}:") + + # Count tasks (excluding internal graph tasks) + task_count = len([t for t in wg.tasks if t.name not in ["graph_inputs", "graph_outputs", "graph_ctx"]]) + print(f" Tasks: {task_count}") + + # Show inputs + if hasattr(wg.inputs, '_sockets'): + print(" Inputs:") + for name, socket in wg.inputs._sockets.items(): + if not name.startswith('_') and name != 'metadata': + if hasattr(socket, 'value') and socket.value is not None: + value = socket.value.value if hasattr(socket.value, 'value') else socket.value + print(f" {name} = {value}") + + # Show outputs + if hasattr(wg.outputs, '_sockets'): + output_names = [name for name in wg.outputs._sockets.keys() + if not name.startswith('_') and name != 'metadata'] + if output_names: + print(f" Outputs: {', '.join(output_names)}") + + # Check for nested workflows + nested_count = 0 + for task in wg.tasks: + if hasattr(task, 'tasks'): + nested_tasks = [t for t in task.tasks if t.name not in ['graph_inputs', 'graph_outputs', 'graph_ctx']] + if len(nested_tasks) > 0: + nested_count += 1 + print(f" Nested workflow '{task.name}' with {len(nested_tasks)} tasks") + # Show nested workflow defaults + for subtask in task.tasks: + if subtask.name == 'graph_inputs' and hasattr(subtask, 'outputs'): + print(" Default inputs:") + for out in subtask.outputs: + if hasattr(out, '_name') and not out._name.startswith('_'): + value = out.value.value if hasattr(out.value, 'value') else out.value + print(f" {out._name} = {value}") + + +def main(): + print("=" * 70) + print("NESTED WORKFLOW ROUND-TRIP TEST") + print("=" * 70) + + # Define file paths + original_file = "main.pwd.json" + roundtrip1_file = "roundtrip1.pwd.json" + roundtrip2_file = "roundtrip2.pwd.json" + nested_original = "prod_div.json" + nested_export = "nested_1.json" + + # Test 1: Load original workflow + print("\n[1] Loading original workflow...") + wg_original = load_workflow_json(original_file) + print_workflow_info(wg_original, "Original workflow") + + # Test 2: Export to roundtrip1 + print("\n[2] Exporting to roundtrip1.pwd.json...") + write_workflow_json(wg_original, roundtrip1_file) + print(f" Exported main workflow to {roundtrip1_file}") + if Path(nested_export).exists(): + print(f" Exported nested workflow to {nested_export}") + + # Test 3: Load roundtrip1 + print("\n[3] Loading roundtrip1.pwd.json...") + wg_roundtrip1 = load_workflow_json(roundtrip1_file) + print_workflow_info(wg_roundtrip1, "Roundtrip 1 workflow") + + # Test 4: Export to roundtrip2 + print("\n[4] Exporting to roundtrip2.pwd.json...") + write_workflow_json(wg_roundtrip1, roundtrip2_file) + print(f" Exported to {roundtrip2_file}") + + # Test 5: Compare files + print("\n[5] Comparing JSON files...") + print("-" * 70) + + # Compare main workflows + main_match = compare_json_files(roundtrip1_file, roundtrip2_file) + print(f" roundtrip1 == roundtrip2: {'PASS' if main_match else 'FAIL'}") + + # Compare nested workflows + if Path(nested_original).exists() and Path(nested_export).exists(): + nested_match = compare_json_files(nested_original, nested_export) + print(f" {nested_original} == {nested_export}: {'PASS' if nested_match else 'FAIL'}") + else: + nested_match = True # If files don't exist, consider it a pass + + # Test 6: Load roundtrip2 and verify + print("\n[6] Loading roundtrip2.pwd.json for verification...") + wg_roundtrip2 = load_workflow_json(roundtrip2_file) + print_workflow_info(wg_roundtrip2, "Roundtrip 2 workflow") + + # Final verdict + print("\n" + "=" * 70) + if main_match and nested_match: + print("RESULT: ALL TESTS PASSED") + print(" - Workflow structure preserved") + print(" - Input/output values preserved") + print(" - Nested workflow defaults preserved") + print(" - Round-trip is stable and idempotent") + result = 0 + else: + print("RESULT: SOME TESTS FAILED") + result = 1 + print("=" * 70) + + # Cleanup + print("\nCleaning up temporary files...") + for temp_file in [roundtrip1_file, roundtrip2_file, nested_export]: + if Path(temp_file).exists(): + Path(temp_file).unlink() + print(f" Removed {temp_file}") + + return result + + +if __name__ == "__main__": + exit(main()) diff --git a/example_workflows/nested/write_aiida.py b/example_workflows/nested/write_aiida.py index 523d8c8..94a78c3 100644 --- a/example_workflows/nested/write_aiida.py +++ b/example_workflows/nested/write_aiida.py @@ -1,55 +1,83 @@ -from aiida_workgraph import task -from aiida import load_profile +from aiida_workgraph import task, WorkGraph, namespace +from aiida import load_profile, orm +from python_workflow_definition.aiida import write_workflow_json +from workflow import get_prod_and_div as _get_prod_and_div, get_sum as _get_sum, get_square as _get_square load_profile() -@task(outputs=["prod", "div"]) -def get_prod_and_div(x, y): - return {"prod": x * y, "div": x / y} +# Wrap the functions with @task decorator +get_prod_and_div = task(outputs=["prod", "div"])(_get_prod_and_div) +get_sum = task(_get_sum) +get_square = task(_get_square) -@task -def get_sum(x, y): - return x + y +# Create nested workflow manually (corresponds to prod_div.json) +nested_wg = WorkGraph( + name="nested_workflow", + inputs=namespace(x=namespace, y=namespace), + outputs=namespace(result=namespace), +) +# Add tasks to nested workflow +t1 = nested_wg.add_task(get_prod_and_div) +t2 = nested_wg.add_task(get_sum) +t3 = nested_wg.add_task(get_square) -@task -def get_square(x): - return x**2 +# Connect nested workflow inputs to first task +nested_wg.add_link(nested_wg.inputs.x, t1.inputs.x) +nested_wg.add_link(nested_wg.inputs.y, t1.inputs.y) +# Connect tasks within nested workflow +nested_wg.add_link(t1.outputs.prod, t2.inputs.x) +nested_wg.add_link(t1.outputs.div, t2.inputs.y) +nested_wg.add_link(t2.outputs.result, t3.inputs.x) -@task.graph -def nested_workflow(x, y): - """Inner workflow from prod_div.json: - - get_prod_and_div(x, y) → prod, div - - get_sum(prod, div) → result - - get_square(result) → result - """ - prod_and_div = get_prod_and_div(x=x, y=y) - sum_result = get_sum(x=prod_and_div.prod, y=prod_and_div.div) - square_result = get_square(x=sum_result.result) - return square_result.result +# Connect nested workflow output +nested_wg.outputs.result = t3.outputs.result +# Set default values for nested workflow inputs +nested_wg.inputs.x.value = orm.Float(1) +nested_wg.inputs.y.value = orm.Float(2) -@task.graph -def main_workflow(a, b, c): - """Outer workflow from main.pwd.json: - - Pre-processing: get_prod_and_div(a, c) → prod, div - - Nested workflow: nested_workflow(prod, div) → result - - Post-processing: get_sum(result, b) → final_result - """ - # Pre-processing step - preprocessing = get_prod_and_div(x=a, y=c) - # Nested workflow - nested_result = nested_workflow(x=preprocessing.prod, y=preprocessing.div) +# Create main workflow (corresponds to main.pwd.json) +main_wg = WorkGraph( + name="main_workflow", + inputs=namespace(a=namespace, b=namespace, c=namespace), + outputs=namespace(final_result=namespace), +) - # Post-processing step - final_result = get_sum(x=nested_result.result, y=b) +# Add tasks to main workflow +preprocessing = main_wg.add_task(get_prod_and_div) +nested_task = main_wg.add_task(nested_wg) # Add the nested workflow as a task +postprocessing = main_wg.add_task(get_sum) - return final_result.result +# Connect main workflow inputs to preprocessing +main_wg.add_link(main_wg.inputs.a, preprocessing.inputs.x) +main_wg.add_link(main_wg.inputs.c, preprocessing.inputs.y) +# Connect preprocessing to nested workflow +main_wg.add_link(preprocessing.outputs.prod, nested_task.inputs.x) +main_wg.add_link(preprocessing.outputs.div, nested_task.inputs.y) -wg = main_workflow.build(a=3, b=2, c=4) -wg.run() +# Connect nested workflow to postprocessing +main_wg.add_link(nested_task.outputs.result, postprocessing.inputs.x) +main_wg.add_link(main_wg.inputs.b, postprocessing.inputs.y) + +# Connect main workflow output +main_wg.outputs.final_result = postprocessing.outputs.result + +# Set default values for main workflow inputs +main_wg.inputs.a.value = orm.Float(3) +main_wg.inputs.b.value = orm.Float(2) +main_wg.inputs.c.value = orm.Float(4) + + +# Export to JSON (will create main_generated.pwd.json and nested_1.json) +print("Exporting workflow to JSON files...") +write_workflow_json(wg=main_wg, file_name="main_generated.pwd.json") +print("✓ Exported to main_generated.pwd.json and nested_1.json") + +# Optionally run the workflow +# main_wg.run() diff --git a/src/python_workflow_definition/aiida.py b/src/python_workflow_definition/aiida.py index f4ebd7f..9c25af7 100644 --- a/src/python_workflow_definition/aiida.py +++ b/src/python_workflow_definition/aiida.py @@ -203,12 +203,23 @@ def load_workflow_json(file_name: str) -> WorkGraph: return wg -def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: +def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = None) -> dict: + """Write a WorkGraph to JSON file(s), with support for nested workflows. + + Args: + wg: The WorkGraph to write + file_name: Output JSON file path + _nested_counter: Internal counter for generating nested workflow filenames + """ + if _nested_counter is None: + _nested_counter = {"count": 0} + data = {NODES_LABEL: [], EDGES_LABEL: []} node_name_mapping = {} data_node_name_mapping = {} i = 0 GRAPH_LEVEL_NAMES = ["graph_inputs", "graph_outputs", "graph_ctx"] + parent_dir = Path(file_name).parent for node in wg.tasks: @@ -217,29 +228,191 @@ def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: node_name_mapping[node.name] = i + # Try to determine if this is a nested WorkGraph or a regular function task executor = node.get_executor() - callable_name = f"{executor.module_path}.{executor.callable_name}" - data[NODES_LABEL].append({"id": i, "type": "function", "value": callable_name}) + # Check if this is a SubGraph-type task (truly nested workflow) + # Note: GraphTask (from @task.graph) is flattened and can't be exported as nested + is_graph = False + nested_wg = None + + # Method 1: Check if this is a SubGraphTask (has spec.node_type == 'SubGraph') + if hasattr(node, 'spec') and hasattr(node.spec, 'node_type'): + if node.spec.node_type == 'SubGraph' and hasattr(node, 'tasks'): + is_graph = True + nested_wg = node + + # Method 2: Check if the node itself has tasks attribute (indicating it's a subgraph) + if not is_graph and hasattr(node, 'tasks'): + # Make sure it has actual tasks (not just an empty list) + tasks_list = [t for t in node.tasks if t.name not in GRAPH_LEVEL_NAMES] + if len(tasks_list) > 0: + is_graph = True + nested_wg = node + + # Method 3: Check if executor is a WorkGraph instance + if not is_graph and isinstance(executor, WorkGraph): + is_graph = True + nested_wg = executor + + if is_graph and nested_wg is not None: + # This is a nested workflow - write it to a separate file + _nested_counter["count"] += 1 + nested_filename = f"nested_{_nested_counter['count']}.json" + nested_path = parent_dir / nested_filename + + # Recursively write the nested workflow + write_workflow_json(nested_wg, str(nested_path), _nested_counter) + + data[NODES_LABEL].append({"id": i, "type": "workflow", "value": nested_filename}) + else: + # This is a regular function task + # Try to get the module path from different sources + module_path = executor.module_path + + # If module_path is None, try to extract from pickled_callable + if module_path is None and hasattr(executor, 'pickled_callable'): + # For pickled callables, try to get the original function + try: + import cloudpickle + func = cloudpickle.loads(executor.pickled_callable) + if hasattr(func, '__module__'): + module_path = func.__module__ + except Exception: + pass # Keep module_path as None + + callable_name = f"{module_path}.{executor.callable_name}" + data[NODES_LABEL].append({"id": i, "type": "function", "value": callable_name}) + i += 1 + # Handle workflow-level inputs (create input nodes) + input_name_mapping = {} + INTERNAL_SOCKETS = ['metadata', '_wait', '_outputs', 'function_data', 'function_inputs'] + + # First, try to get default values from graph_inputs task (for SubGraphTasks) + graph_inputs_defaults = {} + for task in wg.tasks: + if task.name == 'graph_inputs' and hasattr(task, 'outputs'): + for output in task.outputs: + if hasattr(output, '_name') and hasattr(output, 'value'): + output_name = output._name + if output.value is not None and isinstance(output.value, orm.Data): + if isinstance(output.value, orm.List): + graph_inputs_defaults[output_name] = output.value.get_list() + elif isinstance(output.value, orm.Dict): + val = output.value.get_dict() + val.pop("node_type", None) + graph_inputs_defaults[output_name] = val + else: + val = output.value.value + # Convert float to int if it's a whole number + if isinstance(val, float) and val.is_integer(): + val = int(val) + graph_inputs_defaults[output_name] = val + + if hasattr(wg, 'inputs') and wg.inputs is not None and hasattr(wg.inputs, '_sockets'): + for input_name, input_socket in wg.inputs._sockets.items(): + # Skip metadata and other special namespaces/internal sockets + if isinstance(input_socket, TaskSocketNamespace): + continue + if input_name in INTERNAL_SOCKETS or input_name.startswith('_'): + continue + + # Check if this input has a default value + # First try graph_inputs defaults, then the socket value + input_value = None + if input_name in graph_inputs_defaults: + input_value = graph_inputs_defaults[input_name] + elif hasattr(input_socket, 'value') and input_socket.value is not None: + if isinstance(input_socket.value, orm.Data): + if isinstance(input_socket.value, orm.List): + input_value = input_socket.value.get_list() + elif isinstance(input_socket.value, orm.Dict): + input_value = input_socket.value.get_dict() + input_value.pop("node_type", None) + else: + input_value = input_socket.value.value + # Convert float to int if it's a whole number + if isinstance(input_value, float) and input_value.is_integer(): + input_value = int(input_value) + + # Create input node + node_data = {"id": i, "type": "input", "name": input_name} + if input_value is not None: + node_data["value"] = input_value + data[NODES_LABEL].append(node_data) + input_name_mapping[input_name] = i + i += 1 + + # Handle workflow-level outputs (create output nodes) + output_name_mapping = {} + if hasattr(wg, 'outputs') and wg.outputs is not None and hasattr(wg.outputs, '_sockets'): + for output_name, output_socket in wg.outputs._sockets.items(): + # Skip metadata and other special namespaces/internal sockets + if isinstance(output_socket, TaskSocketNamespace): + continue + if output_name in INTERNAL_SOCKETS or output_name.startswith('_'): + continue + + data[NODES_LABEL].append({"id": i, "type": "output", "name": output_name}) + output_name_mapping[output_name] = i + i += 1 + for link in wg.links: link_data = link.to_dict() - # if the from socket is the default result, we set it to None - if link_data["from_socket"] == "result": - link_data["from_socket"] = None - link_data[TARGET_LABEL] = node_name_mapping[link_data.pop("to_node")] - link_data[TARGET_PORT_LABEL] = link_data.pop("to_socket") - link_data[SOURCE_LABEL] = node_name_mapping[link_data.pop("from_node")] - link_data[SOURCE_PORT_LABEL] = link_data.pop("from_socket") - data[EDGES_LABEL].append(link_data) + from_node_name = link_data.pop("from_node") + to_node_name = link_data.pop("to_node") + from_socket = link_data.pop("from_socket") + to_socket = link_data.pop("to_socket") + + # Handle links from graph_inputs + if from_node_name == "graph_inputs": + if from_socket in input_name_mapping: + link_data[SOURCE_LABEL] = input_name_mapping[from_socket] + link_data[SOURCE_PORT_LABEL] = None + else: + continue + else: + link_data[SOURCE_LABEL] = node_name_mapping.get(from_node_name) + # if the from socket is the default result, we set it to None + link_data[SOURCE_PORT_LABEL] = None if from_socket == "result" else from_socket + + # Handle links to graph_outputs + if to_node_name == "graph_outputs": + if to_socket in output_name_mapping: + link_data[TARGET_LABEL] = output_name_mapping[to_socket] + link_data[TARGET_PORT_LABEL] = None + else: + continue + else: + link_data[TARGET_LABEL] = node_name_mapping.get(to_node_name) + link_data[TARGET_PORT_LABEL] = to_socket + + # Only add link if both source and target are valid + if link_data[SOURCE_LABEL] is not None and link_data[TARGET_LABEL] is not None: + data[EDGES_LABEL].append(link_data) + + # Build set of links that are already handled (to avoid duplicates) + existing_links = { + (link[SOURCE_LABEL], link[TARGET_LABEL], link[TARGET_PORT_LABEL]) + for link in data[EDGES_LABEL] + } for node in wg.tasks: + if node.name in GRAPH_LEVEL_NAMES: + continue + for input in node.inputs: # assume namespace is not used as input if isinstance(input, TaskSocketNamespace): continue if isinstance(input.value, orm.Data): + # Check if this input is already connected (e.g., from workflow inputs) + node_id = node_name_mapping[node.name] + if any(link[1] == node_id and link[2] == input._name for link in existing_links): + continue + if input.value.uuid not in data_node_name_mapping: if isinstance(input.value, orm.List): raw_value = input.value.get_list() @@ -249,6 +422,9 @@ def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: raw_value.pop("node_type", None) else: raw_value = input.value.value + # Convert float to int if it's a whole number + if isinstance(raw_value, float) and raw_value.is_integer(): + raw_value = int(raw_value) data[NODES_LABEL].append( {"id": i, "type": "input", "value": raw_value} ) @@ -265,8 +441,21 @@ def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: SOURCE_PORT_LABEL: None, } ) + existing_links.add((input_node_name, node_name_mapping[node.name], input._name)) data[VERSION_LABEL] = VERSION_NUMBER - PythonWorkflowDefinitionWorkflow( - **set_result_node(workflow_dict=update_node_names(workflow_dict=data)) - ).dump_json_file(file_name=file_name, indent=2) + + # Check if we have named input nodes (from workflow-level inputs) + has_named_inputs = any( + node.get("type") == "input" and "name" in node for node in data[NODES_LABEL] + ) + has_output_nodes = any(node.get("type") == "output" for node in data[NODES_LABEL]) + + if has_named_inputs or has_output_nodes: + # New-style workflow with exposed inputs/outputs - names are already set, don't rename + workflow_data = data + else: + # Old-style workflow - need to update names and add result node + workflow_data = set_result_node(workflow_dict=update_node_names(workflow_dict=data)) + + PythonWorkflowDefinitionWorkflow(**workflow_data).dump_json_file(file_name=file_name, indent=2) diff --git a/src/python_workflow_definition/shared.py b/src/python_workflow_definition/shared.py index ef7bb23..9d1c5b6 100644 --- a/src/python_workflow_definition/shared.py +++ b/src/python_workflow_definition/shared.py @@ -6,7 +6,7 @@ SOURCE_PORT_LABEL = "sourcePort" TARGET_LABEL = "target" TARGET_PORT_LABEL = "targetPort" -VERSION_NUMBER = "0.1.0" +VERSION_NUMBER = "0.1.1" VERSION_LABEL = "version" From 75b8774238aa016ffa9dff44c31791fe7d7f9eb1 Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Fri, 24 Oct 2025 11:48:44 +0200 Subject: [PATCH 10/11] add test for nested workflow to ci infrastructure --- .github/workflows/pipeline.yml | 24 + example_workflows/nested/aiida.ipynb | 714 +++++++++++++++++++++++++++ 2 files changed, 738 insertions(+) create mode 100644 example_workflows/nested/aiida.ipynb diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml index 8c07cf8..add96a9 100644 --- a/.github/workflows/pipeline.yml +++ b/.github/workflows/pipeline.yml @@ -138,6 +138,30 @@ jobs: papermill pyiron_workflow.ipynb pyiron_workflow_out.ipynb -k "python3" papermill universal_workflow.ipynb universal_workflow_out.ipynb -k "python3" + nested: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Conda config + run: echo -e "channels:\n - conda-forge\n" > .condarc + - uses: conda-incubator/setup-miniconda@v3 + with: + python-version: "3.12" + miniforge-version: latest + condarc-file: .condarc + environment-file: binder/environment.yml + - name: Installation and setup + shell: bash -l {0} + run: | + pip install --no-deps --no-build-isolation -e . + conda install -c conda-forge jupyter papermill + verdi presto --profile-name pwd + - name: Tests + shell: bash -l {0} + run: | + cd example_workflows/nested + papermill aiida.ipynb aiida_out.ipynb -k "python3" + documentation: runs-on: ubuntu-latest steps: diff --git a/example_workflows/nested/aiida.ipynb b/example_workflows/nested/aiida.ipynb new file mode 100644 index 0000000..566890e --- /dev/null +++ b/example_workflows/nested/aiida.ipynb @@ -0,0 +1,714 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Nested Workflows with AiiDA\n", + "\n", + "This notebook demonstrates nested workflow support with load and write functionality." + ] + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "## Define Nested Workflow with AiiDA" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "2", + "metadata": {}, + "outputs": [], + "source": [ + "from python_workflow_definition.aiida import write_workflow_json, load_workflow_json\n", + "\n", + "from aiida_workgraph import WorkGraph, task, namespace\n", + "from aiida import orm, load_profile\n", + "\n", + "load_profile()\n", + "\n", + "workflow_json_filename = \"nested_test.pwd.json\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "3", + "metadata": {}, + "outputs": [], + "source": [ + "from workflow import (\n", + " get_sum as _get_sum,\n", + " get_prod_and_div as _get_prod_and_div,\n", + " get_square as _get_square,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "# Wrap the functions with @task decorator\n", + "get_prod_and_div = task(outputs=[\"prod\", \"div\"])(_get_prod_and_div)\n", + "get_sum = task(_get_sum)\n", + "get_square = task(_get_square)" + ] + }, + { + "cell_type": "markdown", + "id": "5", + "metadata": {}, + "source": [ + "### Create Nested Workflow" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "# Create nested workflow manually (corresponds to prod_div.json)\n", + "nested_wg = WorkGraph(\n", + " name=\"nested_workflow\",\n", + " inputs=namespace(x=namespace, y=namespace),\n", + " outputs=namespace(result=namespace),\n", + ")\n", + "\n", + "# Add tasks to nested workflow\n", + "t1 = nested_wg.add_task(get_prod_and_div)\n", + "t2 = nested_wg.add_task(get_sum)\n", + "t3 = nested_wg.add_task(get_square)\n", + "\n", + "# Connect nested workflow inputs to first task\n", + "nested_wg.add_link(nested_wg.inputs.x, t1.inputs.x)\n", + "nested_wg.add_link(nested_wg.inputs.y, t1.inputs.y)\n", + "\n", + "# Connect tasks within nested workflow\n", + "nested_wg.add_link(t1.outputs.prod, t2.inputs.x)\n", + "nested_wg.add_link(t1.outputs.div, t2.inputs.y)\n", + "nested_wg.add_link(t2.outputs.result, t3.inputs.x)\n", + "\n", + "# Connect nested workflow output\n", + "nested_wg.outputs.result = t3.outputs.result\n", + "\n", + "# Set default values for nested workflow inputs\n", + "nested_wg.inputs.x.value = orm.Float(1)\n", + "nested_wg.inputs.y.value = orm.Float(2)" + ] + }, + { + "cell_type": "markdown", + "id": "7", + "metadata": {}, + "source": [ + "### Create Main Workflow with Nested Workflow" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "# Create main workflow (corresponds to main.pwd.json)\n", + "main_wg = WorkGraph(\n", + " name=\"main_workflow\",\n", + " inputs=namespace(a=namespace, b=namespace, c=namespace),\n", + " outputs=namespace(final_result=namespace),\n", + ")\n", + "\n", + "# Add tasks to main workflow\n", + "preprocessing = main_wg.add_task(get_prod_and_div)\n", + "nested_task = main_wg.add_task(nested_wg) # Add the nested workflow as a task\n", + "postprocessing = main_wg.add_task(get_sum)\n", + "\n", + "# Connect main workflow inputs to preprocessing\n", + "main_wg.add_link(main_wg.inputs.a, preprocessing.inputs.x)\n", + "main_wg.add_link(main_wg.inputs.c, preprocessing.inputs.y)\n", + "\n", + "# Connect preprocessing to nested workflow\n", + "main_wg.add_link(preprocessing.outputs.prod, nested_task.inputs.x)\n", + "main_wg.add_link(preprocessing.outputs.div, nested_task.inputs.y)\n", + "\n", + "# Connect nested workflow to postprocessing\n", + "main_wg.add_link(nested_task.outputs.result, postprocessing.inputs.x)\n", + "main_wg.add_link(main_wg.inputs.b, postprocessing.inputs.y)\n", + "\n", + "# Connect main workflow output\n", + "main_wg.outputs.final_result = postprocessing.outputs.result\n", + "\n", + "# Set default values for main workflow inputs\n", + "main_wg.inputs.a.value = orm.Float(3)\n", + "main_wg.inputs.b.value = orm.Float(2)\n", + "main_wg.inputs.c.value = orm.Float(4)" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "### Export Workflow to JSON" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "10", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Exported workflow to nested_test.pwd.json\n" + ] + } + ], + "source": [ + "write_workflow_json(wg=main_wg, file_name=workflow_json_filename)\n", + "print(f\"Exported workflow to {workflow_json_filename}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "11", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[38;5;238m───────┬────────────────────────────────────────────────────────────────────────\u001b[0m\n", + " \u001b[38;5;238m│ \u001b[0mFile: \u001b[1mnested_test.pwd.json\u001b[0m\n", + "\u001b[38;5;238m───────┼────────────────────────────────────────────────────────────────────────\u001b[0m\n", + "\u001b[38;5;238m 1\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 2\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mversion\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186m0.1.1\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 3\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mnodes\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;231m[\u001b[0m\n", + "\u001b[38;5;238m 4\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 5\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 6\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 7\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_prod_and_div\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 8\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 9\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 10\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 11\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 12\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mnested_1.json\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 13\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 14\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 15\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 16\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 17\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_sum\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 18\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 19\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 20\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 21\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 22\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186ma\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 23\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\n", + "\u001b[38;5;238m 24\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 25\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 26\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 27\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 28\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mb\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 29\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\n", + "\u001b[38;5;238m 30\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 31\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 32\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m5\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 33\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 34\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mc\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 35\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\n", + "\u001b[38;5;238m 36\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 37\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 38\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m6\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 39\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186moutput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 40\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfinal_result\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 41\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m 42\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m]\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 43\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208medges\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;231m[\u001b[0m\n", + "\u001b[38;5;238m 44\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 45\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 46\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 47\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 48\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 49\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 50\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 51\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 52\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 53\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m5\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 54\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 55\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 56\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 57\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 58\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 59\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 60\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mprod\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 61\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 62\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 63\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 64\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 65\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 66\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mdiv\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 67\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 68\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 69\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 70\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 71\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 72\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 73\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 74\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 75\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 76\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 77\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 78\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 79\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 80\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 81\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m6\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 82\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 83\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 84\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 85\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m 86\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m]\u001b[0m\n", + "\u001b[38;5;238m 87\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m───────┴────────────────────────────────────────────────────────────────────────\u001b[0m\n" + ] + } + ], + "source": [ + "!cat {workflow_json_filename}" + ] + }, + { + "cell_type": "markdown", + "id": "12", + "metadata": {}, + "source": [ + "### Check Nested Workflow File" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "13", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[38;5;238m───────┬────────────────────────────────────────────────────────────────────────\u001b[0m\n", + " \u001b[38;5;238m│ \u001b[0mFile: \u001b[1mnested_1.json\u001b[0m\n", + "\u001b[38;5;238m───────┼────────────────────────────────────────────────────────────────────────\u001b[0m\n", + "\u001b[38;5;238m 1\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 2\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mversion\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186m0.1.1\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 3\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mnodes\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;231m[\u001b[0m\n", + "\u001b[38;5;238m 4\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 5\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 6\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 7\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_prod_and_div\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 8\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 9\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 10\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 11\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 12\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_sum\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 13\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 14\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 15\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 16\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 17\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_square\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 18\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 19\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 20\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 21\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 22\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 23\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\n", + "\u001b[38;5;238m 24\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 25\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 26\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 27\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 28\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 29\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\n", + "\u001b[38;5;238m 30\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 31\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 32\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m5\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 33\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186moutput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 34\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mresult\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 35\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m 36\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m]\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 37\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208medges\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;231m[\u001b[0m\n", + "\u001b[38;5;238m 38\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 39\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 40\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 41\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 42\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 43\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 44\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 45\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 46\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 47\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 48\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 49\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 50\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 51\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 52\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 53\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 54\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mprod\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 55\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 56\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 57\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 58\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 59\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 60\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mdiv\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 61\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 62\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 63\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 64\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 65\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 66\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 67\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 68\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 69\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m5\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 70\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 71\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 72\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 73\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m 74\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m]\u001b[0m\n", + "\u001b[38;5;238m 75\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m───────┴────────────────────────────────────────────────────────────────────────\u001b[0m\n" + ] + } + ], + "source": [ + "!cat nested_1.json" + ] + }, + { + "cell_type": "markdown", + "id": "14", + "metadata": {}, + "source": [ + "## Load and Verify Workflow" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "15", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loaded workflow: WorkGraph\n", + "Number of tasks: 3\n", + "\n", + "Inputs:\n", + " a = 3\n", + " b = 2\n", + " c = 4\n", + "\n", + "Nested workflows:\n", + " Found 'WorkGraph' with 3 tasks\n", + " Default inputs:\n", + " x = 1\n", + " y = 2\n" + ] + } + ], + "source": [ + "# Load the workflow back\n", + "wg_loaded = load_workflow_json(workflow_json_filename)\n", + "\n", + "print(f\"Loaded workflow: {wg_loaded.name}\")\n", + "print(f\"Number of tasks: {len([t for t in wg_loaded.tasks if t.name not in ['graph_inputs', 'graph_outputs', 'graph_ctx']])}\")\n", + "\n", + "# Check inputs\n", + "print(\"\\nInputs:\")\n", + "for name, socket in wg_loaded.inputs._sockets.items():\n", + " if not name.startswith('_') and name != 'metadata':\n", + " if hasattr(socket, 'value') and socket.value is not None:\n", + " value = socket.value.value if hasattr(socket.value, 'value') else socket.value\n", + " print(f\" {name} = {value}\")\n", + "\n", + "# Check for nested workflows\n", + "print(\"\\nNested workflows:\")\n", + "for task in wg_loaded.tasks:\n", + " if hasattr(task, 'tasks'):\n", + " nested_tasks = [t for t in task.tasks if t.name not in ['graph_inputs', 'graph_outputs', 'graph_ctx']]\n", + " if len(nested_tasks) > 0:\n", + " print(f\" Found '{task.name}' with {len(nested_tasks)} tasks\")\n", + " # Check nested workflow defaults\n", + " for subtask in task.tasks:\n", + " if subtask.name == 'graph_inputs' and hasattr(subtask, 'outputs'):\n", + " print(\" Default inputs:\")\n", + " for out in subtask.outputs:\n", + " if hasattr(out, '_name') and not out._name.startswith('_'):\n", + " value = out.value.value if hasattr(out.value, 'value') else out.value\n", + " print(f\" {out._name} = {value}\")" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": [ + "## Round-Trip Test" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "17", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Round-trip test: PASS\n", + "Workflow export/import is stable and idempotent\n" + ] + } + ], + "source": [ + "import json\n", + "from pathlib import Path\n", + "\n", + "# Export the loaded workflow again\n", + "roundtrip_file = \"nested_roundtrip.pwd.json\"\n", + "write_workflow_json(wg_loaded, roundtrip_file)\n", + "\n", + "# Compare the two exports\n", + "with open(workflow_json_filename) as f1, open(roundtrip_file) as f2:\n", + " data1 = json.load(f1)\n", + " data2 = json.load(f2)\n", + "\n", + "match = json.dumps(data1, sort_keys=True) == json.dumps(data2, sort_keys=True)\n", + "print(f\"Round-trip test: {'PASS' if match else 'FAIL'}\")\n", + "\n", + "if not match:\n", + " print(\"\\nDifferences found!\")\n", + " raise AssertionError(\"Round-trip test failed\")\n", + "else:\n", + " print(\"Workflow export/import is stable and idempotent\")" + ] + }, + { + "cell_type": "markdown", + "id": "18", + "metadata": {}, + "source": [ + "## Load Workflow with Other Frameworks" + ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": [ + "### Load Workflow with jobflow" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "20", + "metadata": {}, + "outputs": [], + "source": [ + "# from python_workflow_definition.jobflow import load_workflow_json" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "21", + "metadata": {}, + "outputs": [], + "source": [ + "# from jobflow.managers.local import run_locally" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "22", + "metadata": {}, + "outputs": [ + { + "ename": "ModuleNotFoundError", + "evalue": "No module named 'nested_1'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[13], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m flow \u001b[38;5;241m=\u001b[39m \u001b[43mload_workflow_json\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfile_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mworkflow_json_filename\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/aiida_projects/adis/git-repos/python-workflow-definition/src/python_workflow_definition/jobflow.py:301\u001b[0m, in \u001b[0;36mload_workflow_json\u001b[0;34m(file_name)\u001b[0m\n\u001b[1;32m 299\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(v, \u001b[38;5;28mstr\u001b[39m) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m v:\n\u001b[1;32m 300\u001b[0m p, m \u001b[38;5;241m=\u001b[39m v\u001b[38;5;241m.\u001b[39mrsplit(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m--> 301\u001b[0m mod \u001b[38;5;241m=\u001b[39m \u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[43mp\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 302\u001b[0m nodes_new_dict[\u001b[38;5;28mint\u001b[39m(k)] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mgetattr\u001b[39m(mod, m)\n\u001b[1;32m 303\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", + "File \u001b[0;32m/usr/lib/python3.10/importlib/__init__.py:126\u001b[0m, in \u001b[0;36mimport_module\u001b[0;34m(name, package)\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[1;32m 125\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m--> 126\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m:1050\u001b[0m, in \u001b[0;36m_gcd_import\u001b[0;34m(name, package, level)\u001b[0m\n", + "File \u001b[0;32m:1027\u001b[0m, in \u001b[0;36m_find_and_load\u001b[0;34m(name, import_)\u001b[0m\n", + "File \u001b[0;32m:1004\u001b[0m, in \u001b[0;36m_find_and_load_unlocked\u001b[0;34m(name, import_)\u001b[0m\n", + "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'nested_1'" + ] + } + ], + "source": [ + "# flow = load_workflow_json(file_name=workflow_json_filename)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23", + "metadata": {}, + "outputs": [], + "source": [ + "result = run_locally(flow)\n", + "result" + ] + }, + { + "cell_type": "markdown", + "id": "24", + "metadata": {}, + "source": [ + "### Load Workflow with pyiron_base" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "25", + "metadata": {}, + "outputs": [], + "source": [ + "from python_workflow_definition.pyiron_base import load_workflow_json" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26", + "metadata": {}, + "outputs": [], + "source": [ + "delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\n", + "delayed_object_lst[-1].draw()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27", + "metadata": {}, + "outputs": [], + "source": [ + "delayed_object_lst[-1].pull()" + ] + }, + { + "cell_type": "markdown", + "id": "28", + "metadata": {}, + "source": [ + "### Load Workflow with pyiron_workflow" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29", + "metadata": {}, + "outputs": [], + "source": [ + "from python_workflow_definition.pyiron_workflow import load_workflow_json" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "30", + "metadata": {}, + "outputs": [], + "source": [ + "wf = load_workflow_json(file_name=workflow_json_filename)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "31", + "metadata": {}, + "outputs": [], + "source": [ + "wf.draw(size=(10, 10))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "32", + "metadata": {}, + "outputs": [], + "source": [ + "wf.run()" + ] + }, + { + "cell_type": "markdown", + "id": "33", + "metadata": {}, + "source": [ + "## Cleanup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "34", + "metadata": {}, + "outputs": [], + "source": [ + "# Clean up test files\n", + "import os\n", + "for f in [workflow_json_filename, roundtrip_file, \"nested_1.json\"]:\n", + " if os.path.exists(f):\n", + " os.remove(f)\n", + " print(f\"Removed {f}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "PWD", + "language": "python", + "name": "pwd" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 08a8d275a81cf3b609121175114246dde925f1d8 Mon Sep 17 00:00:00 2001 From: Julian Geiger Date: Fri, 24 Oct 2025 12:15:20 +0200 Subject: [PATCH 11/11] fix ci (try at least) --- example_workflows/nested/aiida.ipynb | 69 +++++++++++------------ src/python_workflow_definition/aiida.py | 74 ++++++++++++++++++------- 2 files changed, 84 insertions(+), 59 deletions(-) diff --git a/example_workflows/nested/aiida.ipynb b/example_workflows/nested/aiida.ipynb index 566890e..bf8c250 100644 --- a/example_workflows/nested/aiida.ipynb +++ b/example_workflows/nested/aiida.ipynb @@ -546,37 +546,20 @@ "execution_count": 13, "id": "22", "metadata": {}, - "outputs": [ - { - "ename": "ModuleNotFoundError", - "evalue": "No module named 'nested_1'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[13], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m flow \u001b[38;5;241m=\u001b[39m \u001b[43mload_workflow_json\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfile_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mworkflow_json_filename\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/aiida_projects/adis/git-repos/python-workflow-definition/src/python_workflow_definition/jobflow.py:301\u001b[0m, in \u001b[0;36mload_workflow_json\u001b[0;34m(file_name)\u001b[0m\n\u001b[1;32m 299\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(v, \u001b[38;5;28mstr\u001b[39m) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m v:\n\u001b[1;32m 300\u001b[0m p, m \u001b[38;5;241m=\u001b[39m v\u001b[38;5;241m.\u001b[39mrsplit(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m--> 301\u001b[0m mod \u001b[38;5;241m=\u001b[39m \u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[43mp\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 302\u001b[0m nodes_new_dict[\u001b[38;5;28mint\u001b[39m(k)] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mgetattr\u001b[39m(mod, m)\n\u001b[1;32m 303\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", - "File \u001b[0;32m/usr/lib/python3.10/importlib/__init__.py:126\u001b[0m, in \u001b[0;36mimport_module\u001b[0;34m(name, package)\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[1;32m 125\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m--> 126\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m:1050\u001b[0m, in \u001b[0;36m_gcd_import\u001b[0;34m(name, package, level)\u001b[0m\n", - "File \u001b[0;32m:1027\u001b[0m, in \u001b[0;36m_find_and_load\u001b[0;34m(name, import_)\u001b[0m\n", - "File \u001b[0;32m:1004\u001b[0m, in \u001b[0;36m_find_and_load_unlocked\u001b[0;34m(name, import_)\u001b[0m\n", - "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'nested_1'" - ] - } - ], + "outputs": [], "source": [ "# flow = load_workflow_json(file_name=workflow_json_filename)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "id": "23", "metadata": {}, "outputs": [], "source": [ - "result = run_locally(flow)\n", - "result" + "# result = run_locally(flow)\n", + "# result" ] }, { @@ -589,33 +572,33 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "id": "25", "metadata": {}, "outputs": [], "source": [ - "from python_workflow_definition.pyiron_base import load_workflow_json" + "# from python_workflow_definition.pyiron_base import load_workflow_json" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "id": "26", "metadata": {}, "outputs": [], "source": [ - "delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\n", - "delayed_object_lst[-1].draw()" + "# delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\n", + "# delayed_object_lst[-1].draw()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "id": "27", "metadata": {}, "outputs": [], "source": [ - "delayed_object_lst[-1].pull()" + "# delayed_object_lst[-1].pull()" ] }, { @@ -628,42 +611,42 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "id": "29", "metadata": {}, "outputs": [], "source": [ - "from python_workflow_definition.pyiron_workflow import load_workflow_json" + "# from python_workflow_definition.pyiron_workflow import load_workflow_json" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "id": "30", "metadata": {}, "outputs": [], "source": [ - "wf = load_workflow_json(file_name=workflow_json_filename)" + "# wf = load_workflow_json(file_name=workflow_json_filename)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "id": "31", "metadata": {}, "outputs": [], "source": [ - "wf.draw(size=(10, 10))" + "# wf.draw(size=(10, 10))" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "id": "32", "metadata": {}, "outputs": [], "source": [ - "wf.run()" + "# wf.run()" ] }, { @@ -676,10 +659,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "id": "34", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Removed nested_test.pwd.json\n", + "Removed nested_roundtrip.pwd.json\n", + "Removed nested_1.json\n" + ] + } + ], "source": [ "# Clean up test files\n", "import os\n", diff --git a/src/python_workflow_definition/aiida.py b/src/python_workflow_definition/aiida.py index 9c25af7..a2e22d9 100644 --- a/src/python_workflow_definition/aiida.py +++ b/src/python_workflow_definition/aiida.py @@ -203,7 +203,9 @@ def load_workflow_json(file_name: str) -> WorkGraph: return wg -def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = None) -> dict: +def write_workflow_json( + wg: WorkGraph, file_name: str, _nested_counter: dict = None +) -> dict: """Write a WorkGraph to JSON file(s), with support for nested workflows. Args: @@ -237,13 +239,13 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N nested_wg = None # Method 1: Check if this is a SubGraphTask (has spec.node_type == 'SubGraph') - if hasattr(node, 'spec') and hasattr(node.spec, 'node_type'): - if node.spec.node_type == 'SubGraph' and hasattr(node, 'tasks'): + if hasattr(node, "spec") and hasattr(node.spec, "node_type"): + if node.spec.node_type == "SubGraph" and hasattr(node, "tasks"): is_graph = True nested_wg = node # Method 2: Check if the node itself has tasks attribute (indicating it's a subgraph) - if not is_graph and hasattr(node, 'tasks'): + if not is_graph and hasattr(node, "tasks"): # Make sure it has actual tasks (not just an empty list) tasks_list = [t for t in node.tasks if t.name not in GRAPH_LEVEL_NAMES] if len(tasks_list) > 0: @@ -264,38 +266,49 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N # Recursively write the nested workflow write_workflow_json(nested_wg, str(nested_path), _nested_counter) - data[NODES_LABEL].append({"id": i, "type": "workflow", "value": nested_filename}) + data[NODES_LABEL].append( + {"id": i, "type": "workflow", "value": nested_filename} + ) else: # This is a regular function task # Try to get the module path from different sources module_path = executor.module_path # If module_path is None, try to extract from pickled_callable - if module_path is None and hasattr(executor, 'pickled_callable'): + if module_path is None and hasattr(executor, "pickled_callable"): # For pickled callables, try to get the original function try: import cloudpickle + func = cloudpickle.loads(executor.pickled_callable) - if hasattr(func, '__module__'): + if hasattr(func, "__module__"): module_path = func.__module__ except Exception: pass # Keep module_path as None callable_name = f"{module_path}.{executor.callable_name}" - data[NODES_LABEL].append({"id": i, "type": "function", "value": callable_name}) + data[NODES_LABEL].append( + {"id": i, "type": "function", "value": callable_name} + ) i += 1 # Handle workflow-level inputs (create input nodes) input_name_mapping = {} - INTERNAL_SOCKETS = ['metadata', '_wait', '_outputs', 'function_data', 'function_inputs'] + INTERNAL_SOCKETS = [ + "metadata", + "_wait", + "_outputs", + "function_data", + "function_inputs", + ] # First, try to get default values from graph_inputs task (for SubGraphTasks) graph_inputs_defaults = {} for task in wg.tasks: - if task.name == 'graph_inputs' and hasattr(task, 'outputs'): + if task.name == "graph_inputs" and hasattr(task, "outputs"): for output in task.outputs: - if hasattr(output, '_name') and hasattr(output, 'value'): + if hasattr(output, "_name") and hasattr(output, "value"): output_name = output._name if output.value is not None and isinstance(output.value, orm.Data): if isinstance(output.value, orm.List): @@ -311,12 +324,16 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N val = int(val) graph_inputs_defaults[output_name] = val - if hasattr(wg, 'inputs') and wg.inputs is not None and hasattr(wg.inputs, '_sockets'): + if ( + hasattr(wg, "inputs") + and wg.inputs is not None + and hasattr(wg.inputs, "_sockets") + ): for input_name, input_socket in wg.inputs._sockets.items(): # Skip metadata and other special namespaces/internal sockets if isinstance(input_socket, TaskSocketNamespace): continue - if input_name in INTERNAL_SOCKETS or input_name.startswith('_'): + if input_name in INTERNAL_SOCKETS or input_name.startswith("_"): continue # Check if this input has a default value @@ -324,7 +341,7 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N input_value = None if input_name in graph_inputs_defaults: input_value = graph_inputs_defaults[input_name] - elif hasattr(input_socket, 'value') and input_socket.value is not None: + elif hasattr(input_socket, "value") and input_socket.value is not None: if isinstance(input_socket.value, orm.Data): if isinstance(input_socket.value, orm.List): input_value = input_socket.value.get_list() @@ -347,12 +364,16 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N # Handle workflow-level outputs (create output nodes) output_name_mapping = {} - if hasattr(wg, 'outputs') and wg.outputs is not None and hasattr(wg.outputs, '_sockets'): + if ( + hasattr(wg, "outputs") + and wg.outputs is not None + and hasattr(wg.outputs, "_sockets") + ): for output_name, output_socket in wg.outputs._sockets.items(): # Skip metadata and other special namespaces/internal sockets if isinstance(output_socket, TaskSocketNamespace): continue - if output_name in INTERNAL_SOCKETS or output_name.startswith('_'): + if output_name in INTERNAL_SOCKETS or output_name.startswith("_"): continue data[NODES_LABEL].append({"id": i, "type": "output", "name": output_name}) @@ -376,7 +397,9 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N else: link_data[SOURCE_LABEL] = node_name_mapping.get(from_node_name) # if the from socket is the default result, we set it to None - link_data[SOURCE_PORT_LABEL] = None if from_socket == "result" else from_socket + link_data[SOURCE_PORT_LABEL] = ( + None if from_socket == "result" else from_socket + ) # Handle links to graph_outputs if to_node_name == "graph_outputs": @@ -410,7 +433,10 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N if isinstance(input.value, orm.Data): # Check if this input is already connected (e.g., from workflow inputs) node_id = node_name_mapping[node.name] - if any(link[1] == node_id and link[2] == input._name for link in existing_links): + if any( + link[1] == node_id and link[2] == input._name + for link in existing_links + ): continue if input.value.uuid not in data_node_name_mapping: @@ -441,7 +467,9 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N SOURCE_PORT_LABEL: None, } ) - existing_links.add((input_node_name, node_name_mapping[node.name], input._name)) + existing_links.add( + (input_node_name, node_name_mapping[node.name], input._name) + ) data[VERSION_LABEL] = VERSION_NUMBER @@ -456,6 +484,10 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N workflow_data = data else: # Old-style workflow - need to update names and add result node - workflow_data = set_result_node(workflow_dict=update_node_names(workflow_dict=data)) + workflow_data = set_result_node( + workflow_dict=update_node_names(workflow_dict=data) + ) - PythonWorkflowDefinitionWorkflow(**workflow_data).dump_json_file(file_name=file_name, indent=2) + PythonWorkflowDefinitionWorkflow(**workflow_data).dump_json_file( + file_name=file_name, indent=2 + )