diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml index 8c07cf8..add96a9 100644 --- a/.github/workflows/pipeline.yml +++ b/.github/workflows/pipeline.yml @@ -138,6 +138,30 @@ jobs: papermill pyiron_workflow.ipynb pyiron_workflow_out.ipynb -k "python3" papermill universal_workflow.ipynb universal_workflow_out.ipynb -k "python3" + nested: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Conda config + run: echo -e "channels:\n - conda-forge\n" > .condarc + - uses: conda-incubator/setup-miniconda@v3 + with: + python-version: "3.12" + miniforge-version: latest + condarc-file: .condarc + environment-file: binder/environment.yml + - name: Installation and setup + shell: bash -l {0} + run: | + pip install --no-deps --no-build-isolation -e . + conda install -c conda-forge jupyter papermill + verdi presto --profile-name pwd + - name: Tests + shell: bash -l {0} + run: | + cd example_workflows/nested + papermill aiida.ipynb aiida_out.ipynb -k "python3" + documentation: runs-on: ubuntu-latest steps: diff --git a/.gitignore b/.gitignore index e464d07..93c5705 100644 --- a/.gitignore +++ b/.gitignore @@ -200,3 +200,4 @@ aiida_to_jobflow_qe.json pyiron_base_to_aiida_simple.json pyiron_base_to_jobflow_qe.json **/*.h5 +**/html/ diff --git a/example_workflows/nested/aiida.ipynb b/example_workflows/nested/aiida.ipynb new file mode 100644 index 0000000..bf8c250 --- /dev/null +++ b/example_workflows/nested/aiida.ipynb @@ -0,0 +1,707 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Nested Workflows with AiiDA\n", + "\n", + "This notebook demonstrates nested workflow support with load and write functionality." 
+ ] + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "## Define Nested Workflow with AiiDA" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "2", + "metadata": {}, + "outputs": [], + "source": [ + "from python_workflow_definition.aiida import write_workflow_json, load_workflow_json\n", + "\n", + "from aiida_workgraph import WorkGraph, task, namespace\n", + "from aiida import orm, load_profile\n", + "\n", + "load_profile()\n", + "\n", + "workflow_json_filename = \"nested_test.pwd.json\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "3", + "metadata": {}, + "outputs": [], + "source": [ + "from workflow import (\n", + " get_sum as _get_sum,\n", + " get_prod_and_div as _get_prod_and_div,\n", + " get_square as _get_square,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "# Wrap the functions with @task decorator\n", + "get_prod_and_div = task(outputs=[\"prod\", \"div\"])(_get_prod_and_div)\n", + "get_sum = task(_get_sum)\n", + "get_square = task(_get_square)" + ] + }, + { + "cell_type": "markdown", + "id": "5", + "metadata": {}, + "source": [ + "### Create Nested Workflow" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "# Create nested workflow manually (corresponds to prod_div.json)\n", + "nested_wg = WorkGraph(\n", + " name=\"nested_workflow\",\n", + " inputs=namespace(x=namespace, y=namespace),\n", + " outputs=namespace(result=namespace),\n", + ")\n", + "\n", + "# Add tasks to nested workflow\n", + "t1 = nested_wg.add_task(get_prod_and_div)\n", + "t2 = nested_wg.add_task(get_sum)\n", + "t3 = nested_wg.add_task(get_square)\n", + "\n", + "# Connect nested workflow inputs to first task\n", + "nested_wg.add_link(nested_wg.inputs.x, t1.inputs.x)\n", + "nested_wg.add_link(nested_wg.inputs.y, t1.inputs.y)\n", + "\n", + "# Connect tasks 
within nested workflow\n", + "nested_wg.add_link(t1.outputs.prod, t2.inputs.x)\n", + "nested_wg.add_link(t1.outputs.div, t2.inputs.y)\n", + "nested_wg.add_link(t2.outputs.result, t3.inputs.x)\n", + "\n", + "# Connect nested workflow output\n", + "nested_wg.outputs.result = t3.outputs.result\n", + "\n", + "# Set default values for nested workflow inputs\n", + "nested_wg.inputs.x.value = orm.Float(1)\n", + "nested_wg.inputs.y.value = orm.Float(2)" + ] + }, + { + "cell_type": "markdown", + "id": "7", + "metadata": {}, + "source": [ + "### Create Main Workflow with Nested Workflow" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "# Create main workflow (corresponds to main.pwd.json)\n", + "main_wg = WorkGraph(\n", + " name=\"main_workflow\",\n", + " inputs=namespace(a=namespace, b=namespace, c=namespace),\n", + " outputs=namespace(final_result=namespace),\n", + ")\n", + "\n", + "# Add tasks to main workflow\n", + "preprocessing = main_wg.add_task(get_prod_and_div)\n", + "nested_task = main_wg.add_task(nested_wg) # Add the nested workflow as a task\n", + "postprocessing = main_wg.add_task(get_sum)\n", + "\n", + "# Connect main workflow inputs to preprocessing\n", + "main_wg.add_link(main_wg.inputs.a, preprocessing.inputs.x)\n", + "main_wg.add_link(main_wg.inputs.c, preprocessing.inputs.y)\n", + "\n", + "# Connect preprocessing to nested workflow\n", + "main_wg.add_link(preprocessing.outputs.prod, nested_task.inputs.x)\n", + "main_wg.add_link(preprocessing.outputs.div, nested_task.inputs.y)\n", + "\n", + "# Connect nested workflow to postprocessing\n", + "main_wg.add_link(nested_task.outputs.result, postprocessing.inputs.x)\n", + "main_wg.add_link(main_wg.inputs.b, postprocessing.inputs.y)\n", + "\n", + "# Connect main workflow output\n", + "main_wg.outputs.final_result = postprocessing.outputs.result\n", + "\n", + "# Set default values for main workflow inputs\n", + "main_wg.inputs.a.value = 
orm.Float(3)\n", + "main_wg.inputs.b.value = orm.Float(2)\n", + "main_wg.inputs.c.value = orm.Float(4)" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "### Export Workflow to JSON" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "10", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Exported workflow to nested_test.pwd.json\n" + ] + } + ], + "source": [ + "write_workflow_json(wg=main_wg, file_name=workflow_json_filename)\n", + "print(f\"Exported workflow to {workflow_json_filename}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "11", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[38;5;238m───────┬────────────────────────────────────────────────────────────────────────\u001b[0m\n", + " \u001b[38;5;238m│ \u001b[0mFile: \u001b[1mnested_test.pwd.json\u001b[0m\n", + "\u001b[38;5;238m───────┼────────────────────────────────────────────────────────────────────────\u001b[0m\n", + "\u001b[38;5;238m 1\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 2\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mversion\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186m0.1.1\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 3\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mnodes\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;231m[\u001b[0m\n", + "\u001b[38;5;238m 4\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 5\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m 
\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 6\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 7\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_prod_and_div\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 8\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 9\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 10\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 11\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 12\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m 
\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mnested_1.json\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 13\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 14\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 15\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 16\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 17\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_sum\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 18\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 19\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 20\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 21\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m 
\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 22\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186ma\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 23\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\n", + "\u001b[38;5;238m 24\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 25\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 26\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 27\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 28\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m 
\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mb\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 29\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\n", + "\u001b[38;5;238m 30\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 31\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 32\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m5\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 33\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 34\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mc\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 35\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\n", + "\u001b[38;5;238m 36\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 37\u001b[0m 
\u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 38\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m6\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 39\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186moutput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 40\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfinal_result\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 41\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m 42\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m]\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 43\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208medges\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;231m[\u001b[0m\n", + "\u001b[38;5;238m 44\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 45\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 46\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m 
\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 47\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 48\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 49\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 50\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 51\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 52\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 53\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m5\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 54\u001b[0m 
\u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 55\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 56\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 57\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 58\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 59\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 60\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mprod\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 61\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 62\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 63\u001b[0m 
\u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 64\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 65\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 66\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mdiv\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 67\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 68\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 69\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 70\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m 
\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 71\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 72\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 73\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 74\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 75\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 76\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 77\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 78\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m 
\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 79\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 80\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 81\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m6\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 82\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 83\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 84\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 85\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m 86\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m]\u001b[0m\n", + "\u001b[38;5;238m 87\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m───────┴────────────────────────────────────────────────────────────────────────\u001b[0m\n" + ] + } + ], + 
"source": [ + "!cat {workflow_json_filename}" + ] + }, + { + "cell_type": "markdown", + "id": "12", + "metadata": {}, + "source": [ + "### Check Nested Workflow File" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "13", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[38;5;238m───────┬────────────────────────────────────────────────────────────────────────\u001b[0m\n", + " \u001b[38;5;238m│ \u001b[0mFile: \u001b[1mnested_1.json\u001b[0m\n", + "\u001b[38;5;238m───────┼────────────────────────────────────────────────────────────────────────\u001b[0m\n", + "\u001b[38;5;238m 1\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 2\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mversion\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186m0.1.1\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 3\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mnodes\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;231m[\u001b[0m\n", + "\u001b[38;5;238m 4\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 5\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 6\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m 
\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 7\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_prod_and_div\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 8\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 9\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 10\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 11\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 12\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_sum\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 13\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 14\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 15\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m 
\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 16\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mfunction\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 17\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mworkflow.get_square\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 18\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 19\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 20\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 21\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 22\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m 
\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 23\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\n", + "\u001b[38;5;238m 24\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 25\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 26\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 27\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186minput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 28\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 29\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mvalue\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\n", + "\u001b[38;5;238m 30\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 31\u001b[0m 
\u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 32\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mid\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m5\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 33\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtype\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186moutput\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 34\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mname\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mresult\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 35\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m 36\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m]\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 37\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208medges\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;231m[\u001b[0m\n", + "\u001b[38;5;238m 38\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 39\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 40\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m 
\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 41\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m3\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 42\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 43\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 44\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 45\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 46\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 47\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m4\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 48\u001b[0m 
\u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 49\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 50\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 51\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 52\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 53\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 54\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mprod\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 55\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 56\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 57\u001b[0m 
\u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 58\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186my\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 59\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m0\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 60\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mdiv\u001b[0m\u001b[38;5;186m\"\u001b[0m\n", + "\u001b[38;5;238m 61\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 62\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 63\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 64\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m 
\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;186mx\u001b[0m\u001b[38;5;186m\"\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 65\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m1\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 66\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 67\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 68\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m{\u001b[0m\n", + "\u001b[38;5;238m 69\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtarget\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m5\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 70\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208mtargetPort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141mnull\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 71\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msource\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m \u001b[0m\u001b[38;5;141m2\u001b[0m\u001b[38;5;231m,\u001b[0m\n", + "\u001b[38;5;238m 72\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;208msourcePort\u001b[0m\u001b[38;5;208m\"\u001b[0m\u001b[38;5;231m:\u001b[0m\u001b[38;5;231m 
\u001b[0m\u001b[38;5;141mnull\u001b[0m\n", + "\u001b[38;5;238m 73\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m 74\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m \u001b[0m\u001b[38;5;231m]\u001b[0m\n", + "\u001b[38;5;238m 75\u001b[0m \u001b[38;5;238m│\u001b[0m \u001b[38;5;231m}\u001b[0m\n", + "\u001b[38;5;238m───────┴────────────────────────────────────────────────────────────────────────\u001b[0m\n" + ] + } + ], + "source": [ + "!cat nested_1.json" + ] + }, + { + "cell_type": "markdown", + "id": "14", + "metadata": {}, + "source": [ + "## Load and Verify Workflow" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "15", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loaded workflow: WorkGraph\n", + "Number of tasks: 3\n", + "\n", + "Inputs:\n", + " a = 3\n", + " b = 2\n", + " c = 4\n", + "\n", + "Nested workflows:\n", + " Found 'WorkGraph' with 3 tasks\n", + " Default inputs:\n", + " x = 1\n", + " y = 2\n" + ] + } + ], + "source": [ + "# Load the workflow back\n", + "wg_loaded = load_workflow_json(workflow_json_filename)\n", + "\n", + "print(f\"Loaded workflow: {wg_loaded.name}\")\n", + "print(f\"Number of tasks: {len([t for t in wg_loaded.tasks if t.name not in ['graph_inputs', 'graph_outputs', 'graph_ctx']])}\")\n", + "\n", + "# Check inputs\n", + "print(\"\\nInputs:\")\n", + "for name, socket in wg_loaded.inputs._sockets.items():\n", + " if not name.startswith('_') and name != 'metadata':\n", + " if hasattr(socket, 'value') and socket.value is not None:\n", + " value = socket.value.value if hasattr(socket.value, 'value') else socket.value\n", + " print(f\" {name} = {value}\")\n", + "\n", + "# Check for nested workflows\n", + "print(\"\\nNested workflows:\")\n", + "for task in wg_loaded.tasks:\n", + " if hasattr(task, 'tasks'):\n", + " nested_tasks = [t for t in task.tasks if t.name not in ['graph_inputs', 'graph_outputs', 
'graph_ctx']]\n", + " if len(nested_tasks) > 0:\n", + " print(f\" Found '{task.name}' with {len(nested_tasks)} tasks\")\n", + " # Check nested workflow defaults\n", + " for subtask in task.tasks:\n", + " if subtask.name == 'graph_inputs' and hasattr(subtask, 'outputs'):\n", + " print(\" Default inputs:\")\n", + " for out in subtask.outputs:\n", + " if hasattr(out, '_name') and not out._name.startswith('_'):\n", + " value = out.value.value if hasattr(out.value, 'value') else out.value\n", + " print(f\" {out._name} = {value}\")" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": [ + "## Round-Trip Test" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "17", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Round-trip test: PASS\n", + "Workflow export/import is stable and idempotent\n" + ] + } + ], + "source": [ + "import json\n", + "from pathlib import Path\n", + "\n", + "# Export the loaded workflow again\n", + "roundtrip_file = \"nested_roundtrip.pwd.json\"\n", + "write_workflow_json(wg_loaded, roundtrip_file)\n", + "\n", + "# Compare the two exports\n", + "with open(workflow_json_filename) as f1, open(roundtrip_file) as f2:\n", + " data1 = json.load(f1)\n", + " data2 = json.load(f2)\n", + "\n", + "match = json.dumps(data1, sort_keys=True) == json.dumps(data2, sort_keys=True)\n", + "print(f\"Round-trip test: {'PASS' if match else 'FAIL'}\")\n", + "\n", + "if not match:\n", + " print(\"\\nDifferences found!\")\n", + " raise AssertionError(\"Round-trip test failed\")\n", + "else:\n", + " print(\"Workflow export/import is stable and idempotent\")" + ] + }, + { + "cell_type": "markdown", + "id": "18", + "metadata": {}, + "source": [ + "## Load Workflow with Other Frameworks" + ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": [ + "### Load Workflow with jobflow" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "20", + 
"metadata": {}, + "outputs": [], + "source": [ + "# from python_workflow_definition.jobflow import load_workflow_json" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "21", + "metadata": {}, + "outputs": [], + "source": [ + "# from jobflow.managers.local import run_locally" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "22", + "metadata": {}, + "outputs": [], + "source": [ + "# flow = load_workflow_json(file_name=workflow_json_filename)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "23", + "metadata": {}, + "outputs": [], + "source": [ + "# result = run_locally(flow)\n", + "# result" + ] + }, + { + "cell_type": "markdown", + "id": "24", + "metadata": {}, + "source": [ + "### Load Workflow with pyiron_base" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "25", + "metadata": {}, + "outputs": [], + "source": [ + "# from python_workflow_definition.pyiron_base import load_workflow_json" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "26", + "metadata": {}, + "outputs": [], + "source": [ + "# delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\n", + "# delayed_object_lst[-1].draw()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "27", + "metadata": {}, + "outputs": [], + "source": [ + "# delayed_object_lst[-1].pull()" + ] + }, + { + "cell_type": "markdown", + "id": "28", + "metadata": {}, + "source": [ + "### Load Workflow with pyiron_workflow" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "29", + "metadata": {}, + "outputs": [], + "source": [ + "# from python_workflow_definition.pyiron_workflow import load_workflow_json" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "30", + "metadata": {}, + "outputs": [], + "source": [ + "# wf = load_workflow_json(file_name=workflow_json_filename)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "31", + "metadata": 
{}, + "outputs": [], + "source": [ + "# wf.draw(size=(10, 10))" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "32", + "metadata": {}, + "outputs": [], + "source": [ + "# wf.run()" + ] + }, + { + "cell_type": "markdown", + "id": "33", + "metadata": {}, + "source": [ + "## Cleanup" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "34", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Removed nested_test.pwd.json\n", + "Removed nested_roundtrip.pwd.json\n", + "Removed nested_1.json\n" + ] + } + ], + "source": [ + "# Clean up test files\n", + "import os\n", + "for f in [workflow_json_filename, roundtrip_file, \"nested_1.json\"]:\n", + " if os.path.exists(f):\n", + " os.remove(f)\n", + " print(f\"Removed {f}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "PWD", + "language": "python", + "name": "pwd" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/example_workflows/nested/load_aiida.py b/example_workflows/nested/load_aiida.py new file mode 100644 index 0000000..7cc7689 --- /dev/null +++ b/example_workflows/nested/load_aiida.py @@ -0,0 +1,12 @@ +from python_workflow_definition.aiida import load_workflow_json + +from aiida import load_profile + +load_profile() + +workflow_json_filename = "main.pwd.json" + +wg = load_workflow_json(workflow_json_filename) + +wg.to_html() +wg.run() diff --git a/example_workflows/nested/main.pwd.json b/example_workflows/nested/main.pwd.json new file mode 100644 index 0000000..df08ddb --- /dev/null +++ b/example_workflows/nested/main.pwd.json @@ -0,0 +1,21 @@ +{ + "version": "0.1.1", + "nodes": [ + { "id": 0, "value": 3, "type": "input", "name": "a" }, + { "id": 
1, "value": 2, "type": "input", "name": "b" }, + { "id": 2, "value": 4, "type": "input", "name": "c" }, + { "id": 3, "type": "function", "value": "workflow.get_prod_and_div" }, + { "id": 4, "type": "workflow", "value": "prod_div.json" }, + { "id": 5, "type": "function", "value": "workflow.get_sum" }, + { "id": 6, "type": "output", "name": "final_result" } + ], + "edges": [ + { "target": 3, "targetPort": "x", "source": 0, "sourcePort": null }, + { "target": 3, "targetPort": "y", "source": 2, "sourcePort": null }, + { "target": 4, "targetPort": "x", "source": 3, "sourcePort": "prod" }, + { "target": 4, "targetPort": "y", "source": 3, "sourcePort": "div" }, + { "target": 5, "targetPort": "x", "source": 4, "sourcePort": "result" }, + { "target": 5, "targetPort": "y", "source": 1, "sourcePort": null }, + { "target": 6, "targetPort": null, "source": 5, "sourcePort": null } + ] +} diff --git a/example_workflows/nested/prod_div.json b/example_workflows/nested/prod_div.json new file mode 100644 index 0000000..60c4221 --- /dev/null +++ b/example_workflows/nested/prod_div.json @@ -0,0 +1,19 @@ +{ + "version": "0.1.1", + "nodes": [ + { "id": 0, "type": "function", "value": "workflow.get_prod_and_div" }, + { "id": 1, "type": "function", "value": "workflow.get_sum" }, + { "id": 2, "type": "function", "value": "workflow.get_square" }, + { "id": 3, "type": "input", "value": 1, "name": "x" }, + { "id": 4, "type": "input", "value": 2, "name": "y" }, + { "id": 5, "type": "output", "name": "result" } + ], + "edges": [ + { "target": 0, "targetPort": "x", "source": 3, "sourcePort": null }, + { "target": 0, "targetPort": "y", "source": 4, "sourcePort": null }, + { "target": 1, "targetPort": "x", "source": 0, "sourcePort": "prod" }, + { "target": 1, "targetPort": "y", "source": 0, "sourcePort": "div" }, + { "target": 2, "targetPort": "x", "source": 1, "sourcePort": null }, + { "target": 5, "targetPort": null, "source": 2, "sourcePort": null } + ] +} \ No newline at end of file diff --git 
a/example_workflows/nested/round_trip.py b/example_workflows/nested/round_trip.py new file mode 100644 index 0000000..59c08ca --- /dev/null +++ b/example_workflows/nested/round_trip.py @@ -0,0 +1,149 @@ +""" +Round-trip test for nested workflows. + +This script demonstrates that: +1. Loading a nested workflow JSON preserves all structure and values +2. Exporting a loaded workflow produces identical JSON +3. Multiple round-trips are stable (load -> export -> load -> export produces identical results) +""" + +import json +from pathlib import Path +from python_workflow_definition.aiida import load_workflow_json, write_workflow_json +from aiida import load_profile + +# Load AiiDA profile +load_profile() + + +def compare_json_files(file1: str, file2: str) -> bool: + """Compare two JSON files for structural equality.""" + with open(file1) as f1, open(file2) as f2: + data1 = json.load(f1) + data2 = json.load(f2) + # Compare as sorted JSON strings to ignore ordering + return json.dumps(data1, sort_keys=True) == json.dumps(data2, sort_keys=True) + + +def print_workflow_info(wg, name: str): + """Print information about a loaded workflow.""" + print(f"\n{name}:") + + # Count tasks (excluding internal graph tasks) + task_count = len([t for t in wg.tasks if t.name not in ["graph_inputs", "graph_outputs", "graph_ctx"]]) + print(f" Tasks: {task_count}") + + # Show inputs + if hasattr(wg.inputs, '_sockets'): + print(" Inputs:") + for name, socket in wg.inputs._sockets.items(): + if not name.startswith('_') and name != 'metadata': + if hasattr(socket, 'value') and socket.value is not None: + value = socket.value.value if hasattr(socket.value, 'value') else socket.value + print(f" {name} = {value}") + + # Show outputs + if hasattr(wg.outputs, '_sockets'): + output_names = [name for name in wg.outputs._sockets.keys() + if not name.startswith('_') and name != 'metadata'] + if output_names: + print(f" Outputs: {', '.join(output_names)}") + + # Check for nested workflows + nested_count 
= 0 + for task in wg.tasks: + if hasattr(task, 'tasks'): + nested_tasks = [t for t in task.tasks if t.name not in ['graph_inputs', 'graph_outputs', 'graph_ctx']] + if len(nested_tasks) > 0: + nested_count += 1 + print(f" Nested workflow '{task.name}' with {len(nested_tasks)} tasks") + # Show nested workflow defaults + for subtask in task.tasks: + if subtask.name == 'graph_inputs' and hasattr(subtask, 'outputs'): + print(" Default inputs:") + for out in subtask.outputs: + if hasattr(out, '_name') and not out._name.startswith('_'): + value = out.value.value if hasattr(out.value, 'value') else out.value + print(f" {out._name} = {value}") + + +def main(): + print("=" * 70) + print("NESTED WORKFLOW ROUND-TRIP TEST") + print("=" * 70) + + # Define file paths + original_file = "main.pwd.json" + roundtrip1_file = "roundtrip1.pwd.json" + roundtrip2_file = "roundtrip2.pwd.json" + nested_original = "prod_div.json" + nested_export = "nested_1.json" + + # Test 1: Load original workflow + print("\n[1] Loading original workflow...") + wg_original = load_workflow_json(original_file) + print_workflow_info(wg_original, "Original workflow") + + # Test 2: Export to roundtrip1 + print("\n[2] Exporting to roundtrip1.pwd.json...") + write_workflow_json(wg_original, roundtrip1_file) + print(f" Exported main workflow to {roundtrip1_file}") + if Path(nested_export).exists(): + print(f" Exported nested workflow to {nested_export}") + + # Test 3: Load roundtrip1 + print("\n[3] Loading roundtrip1.pwd.json...") + wg_roundtrip1 = load_workflow_json(roundtrip1_file) + print_workflow_info(wg_roundtrip1, "Roundtrip 1 workflow") + + # Test 4: Export to roundtrip2 + print("\n[4] Exporting to roundtrip2.pwd.json...") + write_workflow_json(wg_roundtrip1, roundtrip2_file) + print(f" Exported to {roundtrip2_file}") + + # Test 5: Compare files + print("\n[5] Comparing JSON files...") + print("-" * 70) + + # Compare main workflows + main_match = compare_json_files(roundtrip1_file, roundtrip2_file) + 
print(f" roundtrip1 == roundtrip2: {'PASS' if main_match else 'FAIL'}") + + # Compare nested workflows + if Path(nested_original).exists() and Path(nested_export).exists(): + nested_match = compare_json_files(nested_original, nested_export) + print(f" {nested_original} == {nested_export}: {'PASS' if nested_match else 'FAIL'}") + else: + nested_match = True # If files don't exist, consider it a pass + + # Test 6: Load roundtrip2 and verify + print("\n[6] Loading roundtrip2.pwd.json for verification...") + wg_roundtrip2 = load_workflow_json(roundtrip2_file) + print_workflow_info(wg_roundtrip2, "Roundtrip 2 workflow") + + # Final verdict + print("\n" + "=" * 70) + if main_match and nested_match: + print("RESULT: ALL TESTS PASSED") + print(" - Workflow structure preserved") + print(" - Input/output values preserved") + print(" - Nested workflow defaults preserved") + print(" - Round-trip is stable and idempotent") + result = 0 + else: + print("RESULT: SOME TESTS FAILED") + result = 1 + print("=" * 70) + + # Cleanup + print("\nCleaning up temporary files...") + for temp_file in [roundtrip1_file, roundtrip2_file, nested_export]: + if Path(temp_file).exists(): + Path(temp_file).unlink() + print(f" Removed {temp_file}") + + return result + + +if __name__ == "__main__": + exit(main()) diff --git a/example_workflows/nested/workflow.py b/example_workflows/nested/workflow.py new file mode 100644 index 0000000..1a2e4c3 --- /dev/null +++ b/example_workflows/nested/workflow.py @@ -0,0 +1,10 @@ +def get_prod_and_div(x, y): + return {"prod": x * y, "div": x / y} + + +def get_sum(x, y): + return x + y + + +def get_square(x): + return x ** 2 diff --git a/example_workflows/nested/write_aiida.py b/example_workflows/nested/write_aiida.py new file mode 100644 index 0000000..94a78c3 --- /dev/null +++ b/example_workflows/nested/write_aiida.py @@ -0,0 +1,83 @@ +from aiida_workgraph import task, WorkGraph, namespace +from aiida import load_profile, orm +from python_workflow_definition.aiida 
import write_workflow_json +from workflow import get_prod_and_div as _get_prod_and_div, get_sum as _get_sum, get_square as _get_square + +load_profile() + + +# Wrap the functions with @task decorator +get_prod_and_div = task(outputs=["prod", "div"])(_get_prod_and_div) +get_sum = task(_get_sum) +get_square = task(_get_square) + + +# Create nested workflow manually (corresponds to prod_div.json) +nested_wg = WorkGraph( + name="nested_workflow", + inputs=namespace(x=namespace, y=namespace), + outputs=namespace(result=namespace), +) + +# Add tasks to nested workflow +t1 = nested_wg.add_task(get_prod_and_div) +t2 = nested_wg.add_task(get_sum) +t3 = nested_wg.add_task(get_square) + +# Connect nested workflow inputs to first task +nested_wg.add_link(nested_wg.inputs.x, t1.inputs.x) +nested_wg.add_link(nested_wg.inputs.y, t1.inputs.y) + +# Connect tasks within nested workflow +nested_wg.add_link(t1.outputs.prod, t2.inputs.x) +nested_wg.add_link(t1.outputs.div, t2.inputs.y) +nested_wg.add_link(t2.outputs.result, t3.inputs.x) + +# Connect nested workflow output +nested_wg.outputs.result = t3.outputs.result + +# Set default values for nested workflow inputs +nested_wg.inputs.x.value = orm.Float(1) +nested_wg.inputs.y.value = orm.Float(2) + + +# Create main workflow (corresponds to main.pwd.json) +main_wg = WorkGraph( + name="main_workflow", + inputs=namespace(a=namespace, b=namespace, c=namespace), + outputs=namespace(final_result=namespace), +) + +# Add tasks to main workflow +preprocessing = main_wg.add_task(get_prod_and_div) +nested_task = main_wg.add_task(nested_wg) # Add the nested workflow as a task +postprocessing = main_wg.add_task(get_sum) + +# Connect main workflow inputs to preprocessing +main_wg.add_link(main_wg.inputs.a, preprocessing.inputs.x) +main_wg.add_link(main_wg.inputs.c, preprocessing.inputs.y) + +# Connect preprocessing to nested workflow +main_wg.add_link(preprocessing.outputs.prod, nested_task.inputs.x) +main_wg.add_link(preprocessing.outputs.div, 
nested_task.inputs.y) + +# Connect nested workflow to postprocessing +main_wg.add_link(nested_task.outputs.result, postprocessing.inputs.x) +main_wg.add_link(main_wg.inputs.b, postprocessing.inputs.y) + +# Connect main workflow output +main_wg.outputs.final_result = postprocessing.outputs.result + +# Set default values for main workflow inputs +main_wg.inputs.a.value = orm.Float(3) +main_wg.inputs.b.value = orm.Float(2) +main_wg.inputs.c.value = orm.Float(4) + + +# Export to JSON (will create main_generated.pwd.json and nested_1.json) +print("Exporting workflow to JSON files...") +write_workflow_json(wg=main_wg, file_name="main_generated.pwd.json") +print("✓ Exported to main_generated.pwd.json and nested_1.json") + +# Optionally run the workflow +# main_wg.run() diff --git a/src/python_workflow_definition/aiida.py b/src/python_workflow_definition/aiida.py index 16366ee..a2e22d9 100644 --- a/src/python_workflow_definition/aiida.py +++ b/src/python_workflow_definition/aiida.py @@ -1,5 +1,7 @@ from importlib import import_module import traceback +from pathlib import Path +from typing import Any from aiida import orm from aiida_pythonjob.data.serializer import general_serializer @@ -24,72 +26,202 @@ def load_workflow_json(file_name: str) -> WorkGraph: + """Load a workflow from JSON with support for nested workflows. + This function recursively loads workflows, properly exposing inputs/outputs + of nested workflows so they can be connected in the parent workflow. 
+ """ data = PythonWorkflowDefinitionWorkflow.load_json_file(file_name=file_name) + parent_dir = Path(file_name).parent + + # Check if this workflow has workflow-type nodes (nested workflows) + has_nested = any(n["type"] == "workflow" for n in data[NODES_LABEL]) + + # Extract input/output nodes for this workflow + input_nodes = [n for n in data[NODES_LABEL] if n["type"] == "input"] + output_nodes = [n for n in data[NODES_LABEL] if n["type"] == "output"] + + # Create WorkGraph with proper inputs/outputs if this will be used as a sub-workflow + if has_nested or input_nodes or output_nodes: + # Build namespace for inputs + inputs_ns = {} + for inp_node in input_nodes: + inputs_ns[inp_node["name"]] = namespace + + # Build namespace for outputs + outputs_ns = {} + for out_node in output_nodes: + outputs_ns[out_node["name"]] = namespace + + wg = WorkGraph( + inputs=namespace(**inputs_ns) if inputs_ns else None, + outputs=namespace(**outputs_ns) if outputs_ns else None, + ) + else: + wg = WorkGraph() - wg = WorkGraph() task_name_mapping = {} + input_node_mapping = {} # Map input node IDs to their names - for id, identifier in convert_nodes_list_to_dict( - nodes_list=data[NODES_LABEL] - ).items(): - if isinstance(identifier, str) and "." 
in identifier: - p, m = identifier.rsplit(".", 1) + # Process nodes + for node in data[NODES_LABEL]: + node_id = str(node["id"]) + node_type = node["type"] + + if node_type == "function": + # Handle function nodes + func_path = node["value"] + p, m = func_path.rsplit(".", 1) mod = import_module(p) func = getattr(mod, m) decorated_func = task(outputs=namespace())(func) new_task = wg.add_task(decorated_func) new_task.spec = replace(new_task.spec, schema_source=SchemaSource.EMBEDDED) - task_name_mapping[id] = new_task - else: - # data task - data_node = general_serializer(identifier) - task_name_mapping[id] = data_node + task_name_mapping[node_id] = new_task + + elif node_type == "workflow": + # Handle nested workflow nodes + workflow_file = node["value"] + # Resolve path relative to parent workflow file + workflow_path = parent_dir / workflow_file + + # Recursively load the sub-workflow with proper input/output exposure + sub_wg = load_workflow_json(file_name=str(workflow_path)) + + # Add the sub-workflow as a task - it will automatically have the right inputs/outputs + workflow_task = wg.add_task(sub_wg) + task_name_mapping[node_id] = workflow_task - # add links + elif node_type == "input": + # Store input node info for later connection to wg.inputs + input_node_mapping[node_id] = node["name"] + # Set default value on the workflow's exposed input if provided + if "value" in node and node["value"] is not None: + value = node["value"] + data_node = general_serializer(value) + # Set the default on the workflow's exposed input + if hasattr(wg.inputs, node["name"]): + setattr(wg.inputs, node["name"], data_node) + # Also store in mapping for direct connections in non-nested contexts + task_name_mapping[node_id] = data_node + + elif node_type == "output": + # Output nodes will be handled when setting wg.outputs + pass + + # Add links for link in data[EDGES_LABEL]: - # TODO: continue here - to_task = task_name_mapping[str(link[TARGET_LABEL])] - # if the input is not exit, 
it means we pass the data into to the kwargs - # in this case, we add the input socket - if isinstance(to_task, Task): - if link[TARGET_PORT_LABEL] not in to_task.inputs: - to_socket = to_task.add_input_spec( - "workgraph.any", name=link[TARGET_PORT_LABEL] - ) - else: - to_socket = to_task.inputs[link[TARGET_PORT_LABEL]] - from_task = task_name_mapping[str(link[SOURCE_LABEL])] - if isinstance(from_task, orm.Data): - to_socket.value = from_task - else: - try: - if link[SOURCE_PORT_LABEL] is None: - link[SOURCE_PORT_LABEL] = "result" - # because we are not define the outputs explicitly during the pythonjob creation - # we add it here, and assume the output exit - if link[SOURCE_PORT_LABEL] not in from_task.outputs: - # if str(link["sourcePort"]) not in from_task.outputs: + source_id = str(link[SOURCE_LABEL]) + target_id = str(link[TARGET_LABEL]) + source_port = link[SOURCE_PORT_LABEL] + target_port = link[TARGET_PORT_LABEL] + + # Handle output node connections + target_node = next( + (n for n in data[NODES_LABEL] if str(n["id"]) == target_id), None + ) + if target_node and target_node["type"] == "output": + # This connects a task output to a workflow output + from_task = task_name_mapping.get(source_id) + if from_task and isinstance(from_task, Task): + if source_port is None: + source_port = "result" + if source_port not in from_task.outputs: from_socket = from_task.add_output_spec( - "workgraph.any", - name=link[SOURCE_PORT_LABEL], + "workgraph.any", name=source_port + ) + else: + from_socket = from_task.outputs[source_port] + + # Set the workflow output + output_name = target_node["name"] + if hasattr(wg.outputs, output_name): + setattr(wg.outputs, output_name, from_socket) + continue + + # Handle input node connections + source_node = next( + (n for n in data[NODES_LABEL] if str(n["id"]) == source_id), None + ) + if source_node and source_node["type"] == "input": + to_task = task_name_mapping.get(target_id) + if to_task and isinstance(to_task, Task): + # Add 
target socket if it doesn't exist + if target_port not in to_task.inputs: + to_socket = to_task.add_input_spec( + "workgraph.any", name=target_port ) else: - from_socket = from_task.outputs[link[SOURCE_PORT_LABEL]] - if isinstance(to_task, Task): + to_socket = to_task.inputs[target_port] + + # Connect from workflow input or from data node + if hasattr(wg.inputs, source_node["name"]): + # Connect from workflow input + from_socket = getattr(wg.inputs, source_node["name"]) + wg.add_link(from_socket, to_socket) + elif source_id in task_name_mapping: + # Connect from data node (has a value) + data_node = task_name_mapping[source_id] + if isinstance(data_node, orm.Data): + to_socket.value = data_node + continue + + # Handle regular task-to-task connections + to_task = task_name_mapping.get(target_id) + from_task = task_name_mapping.get(source_id) + + if to_task is None or from_task is None: + continue + + if isinstance(to_task, Task): + # Add target socket if needed + if target_port not in to_task.inputs: + to_socket = to_task.add_input_spec("workgraph.any", name=target_port) + else: + to_socket = to_task.inputs[target_port] + + if isinstance(from_task, orm.Data): + to_socket.value = from_task + elif isinstance(from_task, Task): + try: + if source_port is None: + source_port = "result" + + # Add source socket if needed + if source_port not in from_task.outputs: + from_socket = from_task.add_output_spec( + "workgraph.any", name=source_port + ) + else: + from_socket = from_task.outputs[source_port] + wg.add_link(from_socket, to_socket) - except Exception as e: - traceback.print_exc() - print("Failed to link", link, "with error:", e) + except Exception as e: + traceback.print_exc() + print("Failed to link", link, "with error:", e) + return wg -def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: +def write_workflow_json( + wg: WorkGraph, file_name: str, _nested_counter: dict = None +) -> dict: + """Write a WorkGraph to JSON file(s), with support for nested 
workflows. + + Args: + wg: The WorkGraph to write + file_name: Output JSON file path + _nested_counter: Internal counter for generating nested workflow filenames + """ + if _nested_counter is None: + _nested_counter = {"count": 0} + data = {NODES_LABEL: [], EDGES_LABEL: []} node_name_mapping = {} data_node_name_mapping = {} i = 0 GRAPH_LEVEL_NAMES = ["graph_inputs", "graph_outputs", "graph_ctx"] + parent_dir = Path(file_name).parent for node in wg.tasks: @@ -98,29 +230,215 @@ def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: node_name_mapping[node.name] = i + # Try to determine if this is a nested WorkGraph or a regular function task executor = node.get_executor() - callable_name = f"{executor.module_path}.{executor.callable_name}" - data[NODES_LABEL].append({"id": i, "type": "function", "value": callable_name}) + # Check if this is a SubGraph-type task (truly nested workflow) + # Note: GraphTask (from @task.graph) is flattened and can't be exported as nested + is_graph = False + nested_wg = None + + # Method 1: Check if this is a SubGraphTask (has spec.node_type == 'SubGraph') + if hasattr(node, "spec") and hasattr(node.spec, "node_type"): + if node.spec.node_type == "SubGraph" and hasattr(node, "tasks"): + is_graph = True + nested_wg = node + + # Method 2: Check if the node itself has tasks attribute (indicating it's a subgraph) + if not is_graph and hasattr(node, "tasks"): + # Make sure it has actual tasks (not just an empty list) + tasks_list = [t for t in node.tasks if t.name not in GRAPH_LEVEL_NAMES] + if len(tasks_list) > 0: + is_graph = True + nested_wg = node + + # Method 3: Check if executor is a WorkGraph instance + if not is_graph and isinstance(executor, WorkGraph): + is_graph = True + nested_wg = executor + + if is_graph and nested_wg is not None: + # This is a nested workflow - write it to a separate file + _nested_counter["count"] += 1 + nested_filename = f"nested_{_nested_counter['count']}.json" + nested_path = parent_dir / 
nested_filename + + # Recursively write the nested workflow + write_workflow_json(nested_wg, str(nested_path), _nested_counter) + + data[NODES_LABEL].append( + {"id": i, "type": "workflow", "value": nested_filename} + ) + else: + # This is a regular function task + # Try to get the module path from different sources + module_path = executor.module_path + + # If module_path is None, try to extract from pickled_callable + if module_path is None and hasattr(executor, "pickled_callable"): + # For pickled callables, try to get the original function + try: + import cloudpickle + + func = cloudpickle.loads(executor.pickled_callable) + if hasattr(func, "__module__"): + module_path = func.__module__ + except Exception: + pass # Keep module_path as None + + callable_name = f"{module_path}.{executor.callable_name}" + data[NODES_LABEL].append( + {"id": i, "type": "function", "value": callable_name} + ) + i += 1 + # Handle workflow-level inputs (create input nodes) + input_name_mapping = {} + INTERNAL_SOCKETS = [ + "metadata", + "_wait", + "_outputs", + "function_data", + "function_inputs", + ] + + # First, try to get default values from graph_inputs task (for SubGraphTasks) + graph_inputs_defaults = {} + for task in wg.tasks: + if task.name == "graph_inputs" and hasattr(task, "outputs"): + for output in task.outputs: + if hasattr(output, "_name") and hasattr(output, "value"): + output_name = output._name + if output.value is not None and isinstance(output.value, orm.Data): + if isinstance(output.value, orm.List): + graph_inputs_defaults[output_name] = output.value.get_list() + elif isinstance(output.value, orm.Dict): + val = output.value.get_dict() + val.pop("node_type", None) + graph_inputs_defaults[output_name] = val + else: + val = output.value.value + # Convert float to int if it's a whole number + if isinstance(val, float) and val.is_integer(): + val = int(val) + graph_inputs_defaults[output_name] = val + + if ( + hasattr(wg, "inputs") + and wg.inputs is not None + and 
hasattr(wg.inputs, "_sockets") + ): + for input_name, input_socket in wg.inputs._sockets.items(): + # Skip metadata and other special namespaces/internal sockets + if isinstance(input_socket, TaskSocketNamespace): + continue + if input_name in INTERNAL_SOCKETS or input_name.startswith("_"): + continue + + # Check if this input has a default value + # First try graph_inputs defaults, then the socket value + input_value = None + if input_name in graph_inputs_defaults: + input_value = graph_inputs_defaults[input_name] + elif hasattr(input_socket, "value") and input_socket.value is not None: + if isinstance(input_socket.value, orm.Data): + if isinstance(input_socket.value, orm.List): + input_value = input_socket.value.get_list() + elif isinstance(input_socket.value, orm.Dict): + input_value = input_socket.value.get_dict() + input_value.pop("node_type", None) + else: + input_value = input_socket.value.value + # Convert float to int if it's a whole number + if isinstance(input_value, float) and input_value.is_integer(): + input_value = int(input_value) + + # Create input node + node_data = {"id": i, "type": "input", "name": input_name} + if input_value is not None: + node_data["value"] = input_value + data[NODES_LABEL].append(node_data) + input_name_mapping[input_name] = i + i += 1 + + # Handle workflow-level outputs (create output nodes) + output_name_mapping = {} + if ( + hasattr(wg, "outputs") + and wg.outputs is not None + and hasattr(wg.outputs, "_sockets") + ): + for output_name, output_socket in wg.outputs._sockets.items(): + # Skip metadata and other special namespaces/internal sockets + if isinstance(output_socket, TaskSocketNamespace): + continue + if output_name in INTERNAL_SOCKETS or output_name.startswith("_"): + continue + + data[NODES_LABEL].append({"id": i, "type": "output", "name": output_name}) + output_name_mapping[output_name] = i + i += 1 + for link in wg.links: link_data = link.to_dict() - # if the from socket is the default result, we set it to 
None - if link_data["from_socket"] == "result": - link_data["from_socket"] = None - link_data[TARGET_LABEL] = node_name_mapping[link_data.pop("to_node")] - link_data[TARGET_PORT_LABEL] = link_data.pop("to_socket") - link_data[SOURCE_LABEL] = node_name_mapping[link_data.pop("from_node")] - link_data[SOURCE_PORT_LABEL] = link_data.pop("from_socket") - data[EDGES_LABEL].append(link_data) + from_node_name = link_data.pop("from_node") + to_node_name = link_data.pop("to_node") + from_socket = link_data.pop("from_socket") + to_socket = link_data.pop("to_socket") + + # Handle links from graph_inputs + if from_node_name == "graph_inputs": + if from_socket in input_name_mapping: + link_data[SOURCE_LABEL] = input_name_mapping[from_socket] + link_data[SOURCE_PORT_LABEL] = None + else: + continue + else: + link_data[SOURCE_LABEL] = node_name_mapping.get(from_node_name) + # if the from socket is the default result, we set it to None + link_data[SOURCE_PORT_LABEL] = ( + None if from_socket == "result" else from_socket + ) + + # Handle links to graph_outputs + if to_node_name == "graph_outputs": + if to_socket in output_name_mapping: + link_data[TARGET_LABEL] = output_name_mapping[to_socket] + link_data[TARGET_PORT_LABEL] = None + else: + continue + else: + link_data[TARGET_LABEL] = node_name_mapping.get(to_node_name) + link_data[TARGET_PORT_LABEL] = to_socket + + # Only add link if both source and target are valid + if link_data[SOURCE_LABEL] is not None and link_data[TARGET_LABEL] is not None: + data[EDGES_LABEL].append(link_data) + + # Build set of links that are already handled (to avoid duplicates) + existing_links = { + (link[SOURCE_LABEL], link[TARGET_LABEL], link[TARGET_PORT_LABEL]) + for link in data[EDGES_LABEL] + } for node in wg.tasks: + if node.name in GRAPH_LEVEL_NAMES: + continue + for input in node.inputs: # assume namespace is not used as input if isinstance(input, TaskSocketNamespace): continue if isinstance(input.value, orm.Data): + # Check if this input is 
already connected (e.g., from workflow inputs) + node_id = node_name_mapping[node.name] + if any( + link[1] == node_id and link[2] == input._name + for link in existing_links + ): + continue + if input.value.uuid not in data_node_name_mapping: if isinstance(input.value, orm.List): raw_value = input.value.get_list() @@ -130,6 +448,9 @@ def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: raw_value.pop("node_type", None) else: raw_value = input.value.value + # Convert float to int if it's a whole number + if isinstance(raw_value, float) and raw_value.is_integer(): + raw_value = int(raw_value) data[NODES_LABEL].append( {"id": i, "type": "input", "value": raw_value} ) @@ -146,8 +467,27 @@ def write_workflow_json(wg: WorkGraph, file_name: str) -> dict: SOURCE_PORT_LABEL: None, } ) + existing_links.add( + (input_node_name, node_name_mapping[node.name], input._name) + ) data[VERSION_LABEL] = VERSION_NUMBER - PythonWorkflowDefinitionWorkflow( - **set_result_node(workflow_dict=update_node_names(workflow_dict=data)) - ).dump_json_file(file_name=file_name, indent=2) + + # Check if we have named input nodes (from workflow-level inputs) + has_named_inputs = any( + node.get("type") == "input" and "name" in node for node in data[NODES_LABEL] + ) + has_output_nodes = any(node.get("type") == "output" for node in data[NODES_LABEL]) + + if has_named_inputs or has_output_nodes: + # New-style workflow with exposed inputs/outputs - names are already set, don't rename + workflow_data = data + else: + # Old-style workflow - need to update names and add result node + workflow_data = set_result_node( + workflow_dict=update_node_names(workflow_dict=data) + ) + + PythonWorkflowDefinitionWorkflow(**workflow_data).dump_json_file( + file_name=file_name, indent=2 + ) diff --git a/src/python_workflow_definition/models.py b/src/python_workflow_definition/models.py index 4980cfa..91d04c6 100644 --- a/src/python_workflow_definition/models.py +++ b/src/python_workflow_definition/models.py 
@@ -64,12 +64,34 @@ def check_value_format(cls, v: str): return v + +class PythonWorkflowDefinitionWorkflowNode(PythonWorkflowDefinitionBaseNode): + """ + Model for nested workflow reference nodes. + The 'name' attribute is computed automatically from 'value'. + """ + + type: Literal["workflow"] + value: str # Path to the nested workflow JSON file, e.g. 'nested_1.json' + + @field_validator("value") + @classmethod + def check_value_format(cls, v: str): + if not v or "." not in v or v.startswith(".") or v.endswith("."): + msg = ( + "WorkflowNode 'value' must be a non-empty file name " + "with at least one period, e.g. 'nested_1.json'." + ) + raise ValueError(msg) + return v + + # Discriminated Union for Nodes PythonWorkflowDefinitionNode = Annotated[ Union[ PythonWorkflowDefinitionInputNode, PythonWorkflowDefinitionOutputNode, PythonWorkflowDefinitionFunctionNode, + PythonWorkflowDefinitionWorkflowNode, ], Field(discriminator="type"), ] diff --git a/src/python_workflow_definition/shared.py b/src/python_workflow_definition/shared.py index ef7bb23..9d1c5b6 100644 --- a/src/python_workflow_definition/shared.py +++ b/src/python_workflow_definition/shared.py @@ -6,7 +6,7 @@ SOURCE_PORT_LABEL = "sourcePort" TARGET_LABEL = "target" TARGET_PORT_LABEL = "targetPort" -VERSION_NUMBER = "0.1.0" +VERSION_NUMBER = "0.1.1" VERSION_LABEL = "version"