diff --git a/example_workflows/arithmetic/aiida.ipynb b/example_workflows/arithmetic/aiida.ipynb index 6faaa43..0741c99 100644 --- a/example_workflows/arithmetic/aiida.ipynb +++ b/example_workflows/arithmetic/aiida.ipynb @@ -1,103 +1,135 @@ { - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.8" - } - }, - "nbformat_minor": 4, - "nbformat": 4, "cells": [ { "cell_type": "markdown", - "source": "# Aiida", - "metadata": {} + "metadata": {}, + "source": [ + "# Aiida" + ] }, { "cell_type": "markdown", - "source": "## Define workflow with aiida", - "metadata": {} + "metadata": {}, + "source": [ + "## Define workflow with aiida" + ] }, { "cell_type": "code", - "source": "from python_workflow_definition.aiida import write_workflow_json\n\nfrom aiida_workgraph import WorkGraph, task\nfrom aiida import orm, load_profile\nload_profile()\n\nworkflow_json_filename = \"aiida_simple.json\"", + "execution_count": 1, "metadata": { "trusted": true }, "outputs": [], - "execution_count": 1 + "source": [ + "from python_workflow_definition.aiida import write_workflow_json\n", + "\n", + "from aiida_workgraph import WorkGraph, task\n", + "from aiida import orm, load_profile\n", + "load_profile()\n", + "\n", + "workflow_json_filename = \"aiida_simple.json\"" + ] }, { - "metadata": {}, "cell_type": "code", - "outputs": [], "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "from workflow import (\n", " get_sum as _get_sum,\n", " get_prod_and_div as _get_prod_and_div,\n", + " get_square as _get_square,\n", ")" ] }, { - "metadata": {}, "cell_type": "code", - "outputs": [], "execution_count": null, - "source": "wg = WorkGraph(\"arithmetic\")" + "metadata": 
{}, + "outputs": [], + "source": [ + "wg = WorkGraph(\"arithmetic\")" + ] }, { "cell_type": "code", - "source": "get_prod_and_div_task = wg.add_task(\n task(outputs=['prod', 'div'])(_get_prod_and_div),\n name=\"get_prod_and_div\",\n x=orm.Float(1),\n y=orm.Float(2),\n)", + "execution_count": 4, "metadata": { "trusted": true }, "outputs": [], - "execution_count": 4 + "source": [ + "get_prod_and_div_task = wg.add_task(\n", + " task(outputs=['prod', 'div'])(_get_prod_and_div),\n", + " name=\"get_prod_and_div\",\n", + " x=orm.Float(1),\n", + " y=orm.Float(2),\n", + ")" + ] }, { "cell_type": "code", - "source": "get_sum_task = wg.add_task(\n _get_sum,\n name=\"get_sum\",\n x=get_prod_and_div_task.outputs.prod,\n y=get_prod_and_div_task.outputs.div,\n)", + "execution_count": null, "metadata": { "trusted": true }, "outputs": [], - "execution_count": 5 + "source": [ + "get_sum_task = wg.add_task(\n", + " _get_sum,\n", + " name=\"get_sum\",\n", + " x=get_prod_and_div_task.outputs.prod,\n", + " y=get_prod_and_div_task.outputs.div,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "get_square_task = wg.add_task(\n", + " _get_square,\n", + " name=\"get_square\",\n", + " x=get_sum_task.outputs.result,\n", + ")" + ] }, { "cell_type": "code", - "source": "write_workflow_json(wg=wg, file_name=workflow_json_filename)", + "execution_count": 6, "metadata": { "trusted": true }, "outputs": [ { - "execution_count": 6, - "output_type": "execute_result", "data": { - "text/plain": "{'nodes': [{'id': 0, 'function': 'arithmetic_workflow.get_prod_and_div'},\n {'id': 1, 'function': 'arithmetic_workflow.get_sum'},\n {'id': 2, 'value': 1.0},\n {'id': 3, 'value': 2.0}],\n 'edges': [{'target': 1, 'targetPort': 'x', 'source': 0, 'sourcePort': 'prod'},\n {'target': 1, 'targetPort': 'y', 'source': 0, 'sourcePort': 'div'},\n {'target': 0, 'targetPort': 'x', 'source': 2, 'sourcePort': None},\n {'target': 0, 'targetPort': 'y', 
'source': 3, 'sourcePort': None}]}" + "text/plain": [ + "{'nodes': [{'id': 0, 'function': 'arithmetic_workflow.get_prod_and_div'},\n", + " {'id': 1, 'function': 'arithmetic_workflow.get_sum'},\n", + " {'id': 2, 'value': 1.0},\n", + " {'id': 3, 'value': 2.0}],\n", + " 'edges': [{'target': 1, 'targetPort': 'x', 'source': 0, 'sourcePort': 'prod'},\n", + " {'target': 1, 'targetPort': 'y', 'source': 0, 'sourcePort': 'div'},\n", + " {'target': 0, 'targetPort': 'x', 'source': 2, 'sourcePort': None},\n", + " {'target': 0, 'targetPort': 'y', 'source': 3, 'sourcePort': None}]}" + ] }, - "metadata": {} + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" } ], - "execution_count": 6 + "source": [ + "write_workflow_json(wg=wg, file_name=workflow_json_filename)" + ] }, { "cell_type": "code", - "source": "!cat {workflow_json_filename}", + "execution_count": 7, "metadata": { "trusted": true }, @@ -105,19 +137,70 @@ { "name": "stdout", "output_type": "stream", - "text": "{\n \"nodes\": [\n {\n \"id\": 0,\n \"function\": \"arithmetic_workflow.get_prod_and_div\"\n },\n {\n \"id\": 1,\n \"function\": \"arithmetic_workflow.get_sum\"\n },\n {\n \"id\": 2,\n \"value\": 1.0\n },\n {\n \"id\": 3,\n \"value\": 2.0\n }\n ],\n \"edges\": [\n {\n \"target\": 1,\n \"targetPort\": \"x\",\n \"source\": 0,\n \"sourcePort\": \"prod\"\n },\n {\n \"target\": 1,\n \"targetPort\": \"y\",\n \"source\": 0,\n \"sourcePort\": \"div\"\n },\n {\n \"target\": 0,\n \"targetPort\": \"x\",\n \"source\": 2,\n \"sourcePort\": null\n },\n {\n \"target\": 0,\n \"targetPort\": \"y\",\n \"source\": 3,\n \"sourcePort\": null\n }\n ]\n}" + "text": [ + "{\n", + " \"nodes\": [\n", + " {\n", + " \"id\": 0,\n", + " \"function\": \"arithmetic_workflow.get_prod_and_div\"\n", + " },\n", + " {\n", + " \"id\": 1,\n", + " \"function\": \"arithmetic_workflow.get_sum\"\n", + " },\n", + " {\n", + " \"id\": 2,\n", + " \"value\": 1.0\n", + " },\n", + " {\n", + " \"id\": 3,\n", + " \"value\": 2.0\n", + " }\n", + 
" ],\n", + " \"edges\": [\n", + " {\n", + " \"target\": 1,\n", + " \"targetPort\": \"x\",\n", + " \"source\": 0,\n", + " \"sourcePort\": \"prod\"\n", + " },\n", + " {\n", + " \"target\": 1,\n", + " \"targetPort\": \"y\",\n", + " \"source\": 0,\n", + " \"sourcePort\": \"div\"\n", + " },\n", + " {\n", + " \"target\": 0,\n", + " \"targetPort\": \"x\",\n", + " \"source\": 2,\n", + " \"sourcePort\": null\n", + " },\n", + " {\n", + " \"target\": 0,\n", + " \"targetPort\": \"y\",\n", + " \"source\": 3,\n", + " \"sourcePort\": null\n", + " }\n", + " ]\n", + "}" + ] } ], - "execution_count": 7 + "source": [ + "!cat {workflow_json_filename}" + ] }, { "cell_type": "markdown", - "source": "## Load Workflow with jobflow", - "metadata": {} + "metadata": {}, + "source": [ + "## Load Workflow with jobflow" + ] }, { "cell_type": "code", - "source": "from python_workflow_definition.jobflow import load_workflow_json", + "execution_count": 8, "metadata": { "trusted": true }, @@ -125,32 +208,43 @@ { "name": "stderr", "output_type": "stream", - "text": "/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/pkey.py:82: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n \"cipher\": algorithms.TripleDES,\n/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/transport.py:253: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n \"class\": algorithms.TripleDES,\n" + "text": [ + "/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/pkey.py:82: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n", + " 
\"cipher\": algorithms.TripleDES,\n", + "/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/transport.py:253: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n", + " \"class\": algorithms.TripleDES,\n" + ] } ], - "execution_count": 8 + "source": [ + "from python_workflow_definition.jobflow import load_workflow_json" + ] }, { "cell_type": "code", - "source": "from jobflow.managers.local import run_locally", + "execution_count": 9, "metadata": { "trusted": true }, "outputs": [], - "execution_count": 9 + "source": [ + "from jobflow.managers.local import run_locally" + ] }, { "cell_type": "code", - "source": "flow = load_workflow_json(file_name=workflow_json_filename)", + "execution_count": 10, "metadata": { "trusted": true }, "outputs": [], - "execution_count": 10 + "source": [ + "flow = load_workflow_json(file_name=workflow_json_filename)" + ] }, { "cell_type": "code", - "source": "result = run_locally(flow)\nresult", + "execution_count": 11, "metadata": { "trusted": true }, @@ -158,54 +252,148 @@ { "name": "stdout", "output_type": "stream", - "text": "2025-04-23 09:27:52,129 INFO Started executing jobs locally\n2025-04-23 09:27:52,561 INFO Starting job - get_prod_and_div (6eae194f-04a4-4c7c-b0d9-baf624bac375)\n2025-04-23 09:27:52,564 INFO Finished job - get_prod_and_div (6eae194f-04a4-4c7c-b0d9-baf624bac375)\n2025-04-23 09:27:52,564 INFO Starting job - get_sum (21fda98d-7701-45c7-a30a-816e9a91ab75)\n2025-04-23 09:27:52,566 INFO Finished job - get_sum (21fda98d-7701-45c7-a30a-816e9a91ab75)\n2025-04-23 09:27:52,566 INFO Finished executing jobs locally\n" + "text": [ + "2025-04-23 09:27:52,129 INFO Started executing jobs locally\n", + "2025-04-23 09:27:52,561 INFO Starting job - get_prod_and_div (6eae194f-04a4-4c7c-b0d9-baf624bac375)\n", + "2025-04-23 09:27:52,564 INFO Finished job - get_prod_and_div 
(6eae194f-04a4-4c7c-b0d9-baf624bac375)\n", + "2025-04-23 09:27:52,564 INFO Starting job - get_sum (21fda98d-7701-45c7-a30a-816e9a91ab75)\n", + "2025-04-23 09:27:52,566 INFO Finished job - get_sum (21fda98d-7701-45c7-a30a-816e9a91ab75)\n", + "2025-04-23 09:27:52,566 INFO Finished executing jobs locally\n" + ] }, { - "execution_count": 11, - "output_type": "execute_result", "data": { - "text/plain": "{'6eae194f-04a4-4c7c-b0d9-baf624bac375': {1: Response(output={'prod': 2.0, 'div': 0.5}, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False, job_dir=PosixPath('/home/jovyan'))},\n '21fda98d-7701-45c7-a30a-816e9a91ab75': {1: Response(output=2.5, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False, job_dir=PosixPath('/home/jovyan'))}}" + "text/plain": [ + "{'6eae194f-04a4-4c7c-b0d9-baf624bac375': {1: Response(output={'prod': 2.0, 'div': 0.5}, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False, job_dir=PosixPath('/home/jovyan'))},\n", + " '21fda98d-7701-45c7-a30a-816e9a91ab75': {1: Response(output=2.5, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False, job_dir=PosixPath('/home/jovyan'))}}" + ] }, - "metadata": {} + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" } ], - "execution_count": 11 + "source": [ + "result = run_locally(flow)\n", + "result" + ] }, { "cell_type": "markdown", - "source": "## Load Workflow with pyiron_base", - "metadata": {} + "metadata": {}, + "source": [ + "## Load Workflow with pyiron_base" + ] }, { "cell_type": "code", - "source": "from python_workflow_definition.pyiron_base import load_workflow_json", + "execution_count": 12, "metadata": { "trusted": true }, "outputs": [], - "execution_count": 12 + "source": [ + "from python_workflow_definition.pyiron_base import load_workflow_json" + ] }, { "cell_type": "code", - "source": 
"delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\ndelayed_object_lst[-1].draw()", + "execution_count": 13, "metadata": { "trusted": true }, "outputs": [ { - "output_type": "display_data", "data": { - "text/plain": "", - "image/svg+xml": "\n\n\n\n\ncreate_function_job_92b349b68e1d28facd2c0cc67fea0894\n\ncreate_function_job=<pyiron_base.project.delayed.DelayedObject object at 0x7407d4c7d100>\n\n\n\nx_c8e04d7c4ce41e51517caabdd70267c4\n\nx=<pyiron_base.project.delayed.DelayedObject object at 0x7407ca519a90>\n\n\n\nx_c8e04d7c4ce41e51517caabdd70267c4->create_function_job_92b349b68e1d28facd2c0cc67fea0894\n\n\n\n\n\nx_974924ff998969b91bfc8b2db5f854a1\n\nx=1.0\n\n\n\nx_974924ff998969b91bfc8b2db5f854a1->x_c8e04d7c4ce41e51517caabdd70267c4\n\n\n\n\n\ny_6c83c1caef87634071eb9102b68e73cf\n\ny=<pyiron_base.project.delayed.DelayedObject object at 0x7407bf835610>\n\n\n\nx_974924ff998969b91bfc8b2db5f854a1->y_6c83c1caef87634071eb9102b68e73cf\n\n\n\n\n\ny_6c83c1caef87634071eb9102b68e73cf->create_function_job_92b349b68e1d28facd2c0cc67fea0894\n\n\n\n\n\ny_f12a7f1986b9dd058dfc666dbe230b20\n\ny=2.0\n\n\n\ny_f12a7f1986b9dd058dfc666dbe230b20->x_c8e04d7c4ce41e51517caabdd70267c4\n\n\n\n\n\ny_f12a7f1986b9dd058dfc666dbe230b20->y_6c83c1caef87634071eb9102b68e73cf\n\n\n\n\n" + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "create_function_job_92b349b68e1d28facd2c0cc67fea0894\n", + "\n", + "create_function_job=<pyiron_base.project.delayed.DelayedObject object at 0x7407d4c7d100>\n", + "\n", + "\n", + "\n", + "x_c8e04d7c4ce41e51517caabdd70267c4\n", + "\n", + "x=<pyiron_base.project.delayed.DelayedObject object at 0x7407ca519a90>\n", + "\n", + "\n", + "\n", + "x_c8e04d7c4ce41e51517caabdd70267c4->create_function_job_92b349b68e1d28facd2c0cc67fea0894\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "x_974924ff998969b91bfc8b2db5f854a1\n", + "\n", + "x=1.0\n", + "\n", + "\n", + "\n", + "x_974924ff998969b91bfc8b2db5f854a1->x_c8e04d7c4ce41e51517caabdd70267c4\n", + "\n", + 
"\n", + "\n", + "\n", + "\n", + "y_6c83c1caef87634071eb9102b68e73cf\n", + "\n", + "y=<pyiron_base.project.delayed.DelayedObject object at 0x7407bf835610>\n", + "\n", + "\n", + "\n", + "x_974924ff998969b91bfc8b2db5f854a1->y_6c83c1caef87634071eb9102b68e73cf\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "y_6c83c1caef87634071eb9102b68e73cf->create_function_job_92b349b68e1d28facd2c0cc67fea0894\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "y_f12a7f1986b9dd058dfc666dbe230b20\n", + "\n", + "y=2.0\n", + "\n", + "\n", + "\n", + "y_f12a7f1986b9dd058dfc666dbe230b20->x_c8e04d7c4ce41e51517caabdd70267c4\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "y_f12a7f1986b9dd058dfc666dbe230b20->y_6c83c1caef87634071eb9102b68e73cf\n", + "\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] }, - "metadata": {} + "metadata": {}, + "output_type": "display_data" } ], - "execution_count": 13 + "source": [ + "delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\n", + "delayed_object_lst[-1].draw()" + ] }, { "cell_type": "code", - "source": "delayed_object_lst[-1].pull()", + "execution_count": 14, "metadata": { "trusted": true }, @@ -213,18 +401,46 @@ { "name": "stdout", "output_type": "stream", - "text": "The job get_prod_and_div_7c19c6233dddb02781fc331f609558a1 was saved and received the ID: 1\nThe job get_sum_92b8e0a2d45fff6b04940562a8d7d8f7 was saved and received the ID: 2\n" + "text": [ + "The job get_prod_and_div_7c19c6233dddb02781fc331f609558a1 was saved and received the ID: 1\n", + "The job get_sum_92b8e0a2d45fff6b04940562a8d7d8f7 was saved and received the ID: 2\n" + ] }, { - "execution_count": 14, - "output_type": "execute_result", "data": { - "text/plain": "2.5" + "text/plain": [ + "2.5" + ] }, - "metadata": {} + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" } ], - "execution_count": 14 + "source": [ + "delayed_object_lst[-1].pull()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + 
"language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.8" } - ] + }, + "nbformat": 4, + "nbformat_minor": 4 } diff --git a/example_workflows/arithmetic/jobflow.ipynb b/example_workflows/arithmetic/jobflow.ipynb index 32be475..58c782d 100644 --- a/example_workflows/arithmetic/jobflow.ipynb +++ b/example_workflows/arithmetic/jobflow.ipynb @@ -1 +1,565 @@ -{"metadata":{"kernelspec":{"display_name":"Python 3 (ipykernel)","language":"python","name":"python3"},"language_info":{"name":"python","version":"3.12.8","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"}},"nbformat_minor":5,"nbformat":4,"cells":[{"id":"982a4fbe-7cf9-45dd-84ae-9854149db0b9","cell_type":"markdown","source":"# jobflow","metadata":{}},{"id":"e6180712-d081-45c7-ba41-fc5191f10427","cell_type":"markdown","source":"## Define workflow with jobflow\n\nThis tutorial will demonstrate how to use the PWD with `jobflow` and load the workflow with `aiida` and `pyiron`.\n\n[`jobflow`](https://joss.theoj.org/papers/10.21105/joss.05995) was developed to simplify the development of high-throughput workflows. It uses a decorator-based approach to define the “Job“s that can be connected to form complex workflows (“Flow“s). 
`jobflow` is the workflow language of the workflow library [`atomate2`](https://chemrxiv.org/engage/chemrxiv/article-details/678e76a16dde43c9085c75e9), designed to replace [atomate](https://www.sciencedirect.com/science/article/pii/S0927025617303919), which was central to the development of the [Materials Project](https://pubs.aip.org/aip/apm/article/1/1/011002/119685/Commentary-The-Materials-Project-A-materials) database.","metadata":{}},{"id":"69bedfb9ec12c092","cell_type":"markdown","source":"First, we start by importing the job decorator and the Flow class from jobflow, as welll as the necessary modules from the python workflow definition and the example arithmetic workflow.","metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false}}},{"id":"000bbd4a-f53c-4eea-9d85-76f0aa2ca10b","cell_type":"code","source":"from jobflow import job, Flow","metadata":{"ExecuteTime":{"end_time":"2025-04-24T12:51:34.747117656Z","start_time":"2025-04-24T12:51:33.203979325Z"},"trusted":true},"outputs":[{"name":"stderr","output_type":"stream","text":"/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/pkey.py:82: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n \"cipher\": algorithms.TripleDES,\n/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/transport.py:253: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n \"class\": algorithms.TripleDES,\n"}],"execution_count":1},{"id":"06c2bd9e-b2ac-4b88-9158-fa37331c3418","cell_type":"code","source":"from python_workflow_definition.jobflow import write_workflow_json","metadata":{"trusted":true},"outputs":[],"execution_count":2},{"id":"f9217ce7b093b5fc","cell_type":"code","source":"from workflow import (\n 
get_sum as _get_sum,\n get_prod_and_div as _get_prod_and_div,\n)","metadata":{"trusted":true},"outputs":[],"execution_count":3},{"id":"2639deadfae9c591","cell_type":"markdown","source":"Using the job object decorator, the imported functions from the arithmetic workflow are transformed into jobflow “Job”s. These “Job”s can delay the execution of Python functions and can be chained into workflows (“Flow”s). A “Job” can return serializable outputs (e.g., a number, a dictionary, or a Pydantic model) or a so-called “Response” object, which enables the execution of dynamic workflows where the number of nodes is not known prior to the workflow’s execution. ","metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false}}},{"id":"1feba0898ee4e361","cell_type":"code","source":"workflow_json_filename = \"jobflow_simple.json\"","metadata":{"ExecuteTime":{"end_time":"2025-04-24T10:30:04.751088Z","start_time":"2025-04-24T10:30:04.748941Z"},"trusted":true},"outputs":[],"execution_count":4},{"id":"07598344-0f75-433b-8902-bea21a42088c","cell_type":"code","source":"get_sum = job(_get_sum)\n# Note: one could also transfer the outputs to the datastore as well: get_prod_and_div = job(_get_prod_and_div, data=[\"prod\", \"div\"])\n# On the way from the general definition to the jobflow definition, we do this automatically to avoid overflow databases.\nget_prod_and_div = job(_get_prod_and_div)","metadata":{"trusted":true},"outputs":[],"execution_count":5},{"id":"ecef1ed5-a8d3-48c3-9e01-4a40e55c1153","cell_type":"code","source":"obj = get_prod_and_div(x=1, y=2)","metadata":{"trusted":true},"outputs":[],"execution_count":6},{"id":"2b88a30a-e26b-4802-89b7-79ca08cc0af9","cell_type":"code","source":"w = get_sum(x=obj.output.prod, y=obj.output.div)","metadata":{"trusted":true},"outputs":[],"execution_count":7},{"id":"a5e5ca63-2906-47c9-bac6-adebf8643cba","cell_type":"code","source":"flow = Flow([obj, 
w])","metadata":{"trusted":true},"outputs":[],"execution_count":8},{"id":"27688edd256f1420","cell_type":"markdown","source":"As jobflow itself is only a workflow language, the workflows are typically executed on high-performance computers with a workflow manager such as [Fireworks](https://onlinelibrary.wiley.com/doi/full/10.1002/cpe.3505) or [jobflow-remote](https://github.com/Matgenix/jobflow-remote). For smaller and test workflows, simple linear, non-parallel execution of the workflow graph can be performed with jobflow itself. All outputs of individual jobs are saved in a database. For high-throughput applications typically, a MongoDB database is used. For testing and smaller workflows, a memory database can be used instead.","metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false}}},{"id":"e464da97-16a1-4772-9a07-0a47f152781d","cell_type":"code","source":"write_workflow_json(flow=flow, file_name=workflow_json_filename)","metadata":{"trusted":true},"outputs":[],"execution_count":9},{"id":"bca646b2-0a9a-4271-966a-e5903a8c9031","cell_type":"code","source":"!cat {workflow_json_filename}","metadata":{"trusted":true},"outputs":[{"name":"stdout","output_type":"stream","text":"{\"nodes\": [{\"id\": 0, \"function\": \"workflow.get_prod_and_div\"}, {\"id\": 1, \"function\": \"workflow.get_sum\"}, {\"id\": 2, \"value\": 1}, {\"id\": 3, \"value\": 2}], \"edges\": [{\"target\": 0, \"targetPort\": \"x\", \"source\": 2, \"sourcePort\": null}, {\"target\": 0, \"targetPort\": \"y\", \"source\": 3, \"sourcePort\": null}, {\"target\": 1, \"targetPort\": \"x\", \"source\": 0, \"sourcePort\": \"prod\"}, {\"target\": 1, \"targetPort\": \"y\", \"source\": 0, \"sourcePort\": \"div\"}]}"}],"execution_count":10},{"id":"65389ef27c38fdec","cell_type":"markdown","source":"Finally, you can write the workflow data into a JSON file to be imported 
later.","metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false}}},{"id":"87a27540-c390-4d34-ae75-4739bfc4c1b7","cell_type":"markdown","source":"## Load Workflow with aiida\n\nIn this part, we will demonstrate how to import the `jobflow` workflow into `aiida` via the PWD.","metadata":{}},{"id":"66a1b3a6-3d3b-4caa-b58f-d8bc089b1074","cell_type":"code","source":"from aiida import load_profile\n\nload_profile()","metadata":{"trusted":true},"outputs":[{"execution_count":11,"output_type":"execute_result","data":{"text/plain":"Profile"},"metadata":{}}],"execution_count":11},{"id":"4679693b-039b-45cf-8c67-5b2b3d705a83","cell_type":"code","source":"from python_workflow_definition.aiida import load_workflow_json","metadata":{"trusted":true},"outputs":[],"execution_count":12},{"id":"cc7127193d31d8ef","cell_type":"markdown","source":"We import the necessary modules from `aiida` and the PWD, as well as the workflow JSON file.","metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false}}},{"id":"68c41a61-d185-47e8-ba31-eeff71d8b2c6","cell_type":"code","source":"wg = load_workflow_json(file_name=workflow_json_filename)\nwg","metadata":{"trusted":true},"outputs":[{"execution_count":13,"output_type":"execute_result","data":{"text/plain":"NodeGraphWidget(settings={'minimap': True}, style={'width': '90%', 'height': '600px'}, value={'name': 'WorkGra…","application/vnd.jupyter.widget-view+json":{"version_major":2,"version_minor":1,"model_id":"2bbe8584e19148d79f7be020abafa3eb"}},"metadata":{}}],"execution_count":13},{"id":"4816325767559bbe","cell_type":"markdown","source":"Finally, we are now able to run the workflow with `aiida`.","metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false}}},{"id":"05228ece-643c-420c-8df8-4ce3df379515","cell_type":"code","source":"wg.run()","metadata":{"trusted":true},"outputs":[{"name":"stderr","output_type":"stream","text":"04/24/2025 02:55:50 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] 
[3|WorkGraphEngine|continue_workgraph]: tasks ready to run: get_prod_and_div1\n04/24/2025 02:55:50 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|update_task_state]: Task: get_prod_and_div1, type: PyFunction, finished.\n04/24/2025 02:55:50 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|continue_workgraph]: tasks ready to run: get_sum2\n04/24/2025 02:55:51 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|update_task_state]: Task: get_sum2, type: PyFunction, finished.\n04/24/2025 02:55:51 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|continue_workgraph]: tasks ready to run: \n04/24/2025 02:55:51 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|finalize]: Finalize workgraph.\n"}],"execution_count":14},{"id":"2c942094-61b4-4e94-859a-64f87b5bec64","cell_type":"markdown","source":"## Load Workflow with pyiron_base\n\nIn this part, we will demonstrate how to import the `jobflow` workflow into `pyiron` via the PWD.","metadata":{}},{"id":"ea102341-84f7-4156-a7d1-c3ab1ea613a5","cell_type":"code","source":"from python_workflow_definition.pyiron_base import load_workflow_json","metadata":{"trusted":true},"outputs":[],"execution_count":15},{"id":"8f2a621d-b533-4ddd-8bcd-c22db2f922ec","cell_type":"code","source":"delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\ndelayed_object_lst[-1].draw()","metadata":{"trusted":true},"outputs":[{"output_type":"display_data","data":{"text/plain":"","image/svg+xml":"\n\n\n\n\ncreate_function_job_c942e1812492f8750b853ea68445d43f\n\ncreate_function_job=<pyiron_base.project.delayed.DelayedObject object at 0x719ee1a6ee70>\n\n\n\nx_5a41f1a01c281872a92d14b5bf806b3c\n\nx=<pyiron_base.project.delayed.DelayedObject object at 
0x719ee39de8d0>\n\n\n\nx_5a41f1a01c281872a92d14b5bf806b3c->create_function_job_c942e1812492f8750b853ea68445d43f\n\n\n\n\n\nx_1d847da32ecaabf6731c38f798c3d4ce\n\nx=1\n\n\n\nx_1d847da32ecaabf6731c38f798c3d4ce->x_5a41f1a01c281872a92d14b5bf806b3c\n\n\n\n\n\ny_2f79d27085d95c7ddb45d9566ae33dfb\n\ny=<pyiron_base.project.delayed.DelayedObject object at 0x719ee1d5d940>\n\n\n\nx_1d847da32ecaabf6731c38f798c3d4ce->y_2f79d27085d95c7ddb45d9566ae33dfb\n\n\n\n\n\ny_2f79d27085d95c7ddb45d9566ae33dfb->create_function_job_c942e1812492f8750b853ea68445d43f\n\n\n\n\n\ny_a9ec4f5f33f0d64e74ed5d9900bceac6\n\ny=2\n\n\n\ny_a9ec4f5f33f0d64e74ed5d9900bceac6->x_5a41f1a01c281872a92d14b5bf806b3c\n\n\n\n\n\ny_a9ec4f5f33f0d64e74ed5d9900bceac6->y_2f79d27085d95c7ddb45d9566ae33dfb\n\n\n\n\n"},"metadata":{}}],"execution_count":16},{"id":"cf80267d-c2b0-4236-bf1d-a57596985fc1","cell_type":"code","source":"delayed_object_lst[-1].pull()","metadata":{"trusted":true},"outputs":[{"name":"stdout","output_type":"stream","text":"The job get_prod_and_div_00cf2c787390eacfbc4a51e9a0c38ec7 was saved and received the ID: 1\nThe job get_sum_4b5b9d16b259a13b6a32798ce2779af8 was saved and received the ID: 2\n"},{"execution_count":17,"output_type":"execute_result","data":{"text/plain":"2.5"},"metadata":{}}],"execution_count":17},{"id":"9414680d1cbc3b2e","cell_type":"markdown","source":"Here, the procedure is the same as before: Import the necessary `pyiron_base` module from the PWD, import the workflow JSON file and run the workflow with pyiron.","metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false}}},{"id":"c199b28f3c0399cc","cell_type":"code","source":"","metadata":{"collapsed":false,"trusted":true,"jupyter":{"outputs_hidden":false}},"outputs":[],"execution_count":null}]} +{ + "cells": [ + { + "cell_type": "markdown", + "id": "982a4fbe-7cf9-45dd-84ae-9854149db0b9", + "metadata": {}, + "source": [ + "# jobflow" + ] + }, + { + "cell_type": "markdown", + "id": "e6180712-d081-45c7-ba41-fc5191f10427", + 
"metadata": {}, + "source": [ + "## Define workflow with jobflow\n", + "\n", + "This tutorial will demonstrate how to use the PWD with `jobflow` and load the workflow with `aiida` and `pyiron`.\n", + "\n", + "[`jobflow`](https://joss.theoj.org/papers/10.21105/joss.05995) was developed to simplify the development of high-throughput workflows. It uses a decorator-based approach to define the “Job”s that can be connected to form complex workflows (“Flow”s). `jobflow` is the workflow language of the workflow library [`atomate2`](https://chemrxiv.org/engage/chemrxiv/article-details/678e76a16dde43c9085c75e9), designed to replace [atomate](https://www.sciencedirect.com/science/article/pii/S0927025617303919), which was central to the development of the [Materials Project](https://pubs.aip.org/aip/apm/article/1/1/011002/119685/Commentary-The-Materials-Project-A-materials) database." + ] + }, + { + "cell_type": "markdown", + "id": "69bedfb9ec12c092", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "First, we start by importing the job decorator and the Flow class from jobflow, as well as the necessary modules from the python workflow definition and the example arithmetic workflow."
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "000bbd4a-f53c-4eea-9d85-76f0aa2ca10b", + "metadata": { + "ExecuteTime": { + "end_time": "2025-04-24T12:51:34.747117656Z", + "start_time": "2025-04-24T12:51:33.203979325Z" + }, + "trusted": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/pkey.py:82: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n", + " \"cipher\": algorithms.TripleDES,\n", + "/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/transport.py:253: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n", + " \"class\": algorithms.TripleDES,\n" + ] + } + ], + "source": [ + "from jobflow import job, Flow" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "06c2bd9e-b2ac-4b88-9158-fa37331c3418", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "from python_workflow_definition.jobflow import write_workflow_json" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f9217ce7b093b5fc", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "from workflow import (\n", + " get_sum as _get_sum,\n", + " get_prod_and_div as _get_prod_and_div,\n", + " get_square as _get_square,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "2639deadfae9c591", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "Using the job object decorator, the imported functions from the arithmetic workflow are transformed into jobflow “Job”s. 
These “Job”s can delay the execution of Python functions and can be chained into workflows (“Flow”s). A “Job” can return serializable outputs (e.g., a number, a dictionary, or a Pydantic model) or a so-called “Response” object, which enables the execution of dynamic workflows where the number of nodes is not known prior to the workflow’s execution. " + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "1feba0898ee4e361", + "metadata": { + "ExecuteTime": { + "end_time": "2025-04-24T10:30:04.751088Z", + "start_time": "2025-04-24T10:30:04.748941Z" + }, + "trusted": true + }, + "outputs": [], + "source": [ + "workflow_json_filename = \"jobflow_simple.json\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "07598344-0f75-433b-8902-bea21a42088c", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "get_sum = job(_get_sum)\n", + "# Note: one could also transfer the outputs to the datastore as well: get_prod_and_div = job(_get_prod_and_div, data=[\"prod\", \"div\"])\n", + "# On the way from the general definition to the jobflow definition, we do this automatically to avoid overflow databases.\n", + "get_prod_and_div = job(_get_prod_and_div)\n", + "get_square = job(_get_square)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ecef1ed5-a8d3-48c3-9e01-4a40e55c1153", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "prod_and_div = get_prod_and_div(x=1, y=2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2b88a30a-e26b-4802-89b7-79ca08cc0af9", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "tmp_sum = get_sum(x=prod_and_div.output.prod, y=prod_and_div.output.div)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8d95c3aa", + "metadata": {}, + "outputs": [], + "source": [ + "result = get_square(x=tmp_sum.output)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"a5e5ca63-2906-47c9-bac6-adebf8643cba", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "flow = Flow([prod_and_div, tmp_sum, result])" + ] + }, + { + "cell_type": "markdown", + "id": "27688edd256f1420", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "As jobflow itself is only a workflow language, the workflows are typically executed on high-performance computers with a workflow manager such as [Fireworks](https://onlinelibrary.wiley.com/doi/full/10.1002/cpe.3505) or [jobflow-remote](https://github.com/Matgenix/jobflow-remote). For smaller and test workflows, simple linear, non-parallel execution of the workflow graph can be performed with jobflow itself. All outputs of individual jobs are saved in a database. For high-throughput applications typically, a MongoDB database is used. For testing and smaller workflows, a memory database can be used instead." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "e464da97-16a1-4772-9a07-0a47f152781d", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "write_workflow_json(flow=flow, file_name=workflow_json_filename)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "bca646b2-0a9a-4271-966a-e5903a8c9031", + "metadata": { + "trusted": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\"nodes\": [{\"id\": 0, \"function\": \"workflow.get_prod_and_div\"}, {\"id\": 1, \"function\": \"workflow.get_sum\"}, {\"id\": 2, \"value\": 1}, {\"id\": 3, \"value\": 2}], \"edges\": [{\"target\": 0, \"targetPort\": \"x\", \"source\": 2, \"sourcePort\": null}, {\"target\": 0, \"targetPort\": \"y\", \"source\": 3, \"sourcePort\": null}, {\"target\": 1, \"targetPort\": \"x\", \"source\": 0, \"sourcePort\": \"prod\"}, {\"target\": 1, \"targetPort\": \"y\", \"source\": 0, \"sourcePort\": \"div\"}]}" + ] + } + ], + "source": [ + "!cat {workflow_json_filename}" + ] + }, + 
{ + "cell_type": "markdown", + "id": "65389ef27c38fdec", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "Finally, you can write the workflow data into a JSON file to be imported later." + ] + }, + { + "cell_type": "markdown", + "id": "87a27540-c390-4d34-ae75-4739bfc4c1b7", + "metadata": {}, + "source": [ + "## Load Workflow with aiida\n", + "\n", + "In this part, we will demonstrate how to import the `jobflow` workflow into `aiida` via the PWD." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "66a1b3a6-3d3b-4caa-b58f-d8bc089b1074", + "metadata": { + "trusted": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Profile" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from aiida import load_profile\n", + "\n", + "load_profile()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "4679693b-039b-45cf-8c67-5b2b3d705a83", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "from python_workflow_definition.aiida import load_workflow_json" + ] + }, + { + "cell_type": "markdown", + "id": "cc7127193d31d8ef", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "We import the necessary modules from `aiida` and the PWD, as well as the workflow JSON file." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "68c41a61-d185-47e8-ba31-eeff71d8b2c6", + "metadata": { + "trusted": true + }, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "2bbe8584e19148d79f7be020abafa3eb", + "version_major": 2, + "version_minor": 1 + }, + "text/plain": [ + "NodeGraphWidget(settings={'minimap': True}, style={'width': '90%', 'height': '600px'}, value={'name': 'WorkGra…" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "wg = load_workflow_json(file_name=workflow_json_filename)\n", + "wg" + ] + }, + { + "cell_type": "markdown", + "id": "4816325767559bbe", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "Finally, we are now able to run the workflow with `aiida`." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "05228ece-643c-420c-8df8-4ce3df379515", + "metadata": { + "trusted": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "04/24/2025 02:55:50 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|continue_workgraph]: tasks ready to run: get_prod_and_div1\n", + "04/24/2025 02:55:50 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|update_task_state]: Task: get_prod_and_div1, type: PyFunction, finished.\n", + "04/24/2025 02:55:50 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|continue_workgraph]: tasks ready to run: get_sum2\n", + "04/24/2025 02:55:51 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|update_task_state]: Task: get_sum2, type: PyFunction, finished.\n", + "04/24/2025 02:55:51 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|continue_workgraph]: tasks ready to run: \n", + 
"04/24/2025 02:55:51 PM <99> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [3|WorkGraphEngine|finalize]: Finalize workgraph.\n" + ] + } + ], + "source": [ + "wg.run()" + ] + }, + { + "cell_type": "markdown", + "id": "2c942094-61b4-4e94-859a-64f87b5bec64", + "metadata": {}, + "source": [ + "## Load Workflow with pyiron_base\n", + "\n", + "In this part, we will demonstrate how to import the `jobflow` workflow into `pyiron` via the PWD." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "ea102341-84f7-4156-a7d1-c3ab1ea613a5", + "metadata": { + "trusted": true + }, + "outputs": [], + "source": [ + "from python_workflow_definition.pyiron_base import load_workflow_json" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "8f2a621d-b533-4ddd-8bcd-c22db2f922ec", + "metadata": { + "trusted": true + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "create_function_job_c942e1812492f8750b853ea68445d43f\n", + "\n", + "create_function_job=<pyiron_base.project.delayed.DelayedObject object at 0x719ee1a6ee70>\n", + "\n", + "\n", + "\n", + "x_5a41f1a01c281872a92d14b5bf806b3c\n", + "\n", + "x=<pyiron_base.project.delayed.DelayedObject object at 0x719ee39de8d0>\n", + "\n", + "\n", + "\n", + "x_5a41f1a01c281872a92d14b5bf806b3c->create_function_job_c942e1812492f8750b853ea68445d43f\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "x_1d847da32ecaabf6731c38f798c3d4ce\n", + "\n", + "x=1\n", + "\n", + "\n", + "\n", + "x_1d847da32ecaabf6731c38f798c3d4ce->x_5a41f1a01c281872a92d14b5bf806b3c\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "y_2f79d27085d95c7ddb45d9566ae33dfb\n", + "\n", + "y=<pyiron_base.project.delayed.DelayedObject object at 0x719ee1d5d940>\n", + "\n", + "\n", + "\n", + "x_1d847da32ecaabf6731c38f798c3d4ce->y_2f79d27085d95c7ddb45d9566ae33dfb\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "y_2f79d27085d95c7ddb45d9566ae33dfb->create_function_job_c942e1812492f8750b853ea68445d43f\n", + 
"\n", + "\n", + "\n", + "\n", + "\n", + "y_a9ec4f5f33f0d64e74ed5d9900bceac6\n", + "\n", + "y=2\n", + "\n", + "\n", + "\n", + "y_a9ec4f5f33f0d64e74ed5d9900bceac6->x_5a41f1a01c281872a92d14b5bf806b3c\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "y_a9ec4f5f33f0d64e74ed5d9900bceac6->y_2f79d27085d95c7ddb45d9566ae33dfb\n", + "\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\n", + "delayed_object_lst[-1].draw()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "cf80267d-c2b0-4236-bf1d-a57596985fc1", + "metadata": { + "trusted": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The job get_prod_and_div_00cf2c787390eacfbc4a51e9a0c38ec7 was saved and received the ID: 1\n", + "The job get_sum_4b5b9d16b259a13b6a32798ce2779af8 was saved and received the ID: 2\n" + ] + }, + { + "data": { + "text/plain": [ + "2.5" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "delayed_object_lst[-1].pull()" + ] + }, + { + "cell_type": "markdown", + "id": "9414680d1cbc3b2e", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "Here, the procedure is the same as before: Import the necessary `pyiron_base` module from the PWD, import the workflow JSON file and run the workflow with pyiron." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c199b28f3c0399cc", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "trusted": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/example_workflows/arithmetic/pyiron_base.ipynb b/example_workflows/arithmetic/pyiron_base.ipynb index abb63ec..b355109 100644 --- a/example_workflows/arithmetic/pyiron_base.ipynb +++ b/example_workflows/arithmetic/pyiron_base.ipynb @@ -1,123 +1,133 @@ { - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "name": "python", - "version": "3.12.8", - "mimetype": "text/x-python", - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "pygments_lexer": "ipython3", - "nbconvert_exporter": "python", - "file_extension": ".py" - } - }, - "nbformat_minor": 5, - "nbformat": 4, "cells": [ { - "id": "c39b76fb-259f-4e16-a44d-02a295c82386", "cell_type": "markdown", - "source": "# pyiron", - "metadata": {} + "id": "c39b76fb-259f-4e16-a44d-02a295c82386", + "metadata": {}, + "source": [ + "# pyiron" + ] }, { - "id": "3638419b-a0cb-49e2-b157-7fbb1acde90f", "cell_type": "markdown", - "source": "## Define workflow with pyiron_base", - "metadata": {} + "id": "3638419b-a0cb-49e2-b157-7fbb1acde90f", + "metadata": {}, + "source": [ + "## Define workflow with pyiron_base" + ] }, { - "id": "b4a78447-e87c-4fb4-8d17-d9a280eb7254", "cell_type": "code", - "source": "from pyiron_base import job", + "execution_count": 1, + 
"id": "b4a78447-e87c-4fb4-8d17-d9a280eb7254", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 1 + "source": [ + "from pyiron_base import job" + ] }, { - "id": "06c2bd9e-b2ac-4b88-9158-fa37331c3418", "cell_type": "code", - "source": "from python_workflow_definition.pyiron_base import write_workflow_json", + "execution_count": 2, + "id": "06c2bd9e-b2ac-4b88-9158-fa37331c3418", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 2 + "source": [ + "from python_workflow_definition.pyiron_base import write_workflow_json" + ] }, { - "metadata": {}, "cell_type": "code", - "outputs": [], "execution_count": null, + "id": "6d859dfff0c2df5c", + "metadata": {}, + "outputs": [], "source": [ "from workflow import (\n", " get_sum as _get_sum,\n", " get_prod_and_div as _get_prod_and_div,\n", + " get_square as _get_square,\n", ")" - ], - "id": "6d859dfff0c2df5c" + ] }, { - "metadata": {}, "cell_type": "code", - "outputs": [], "execution_count": null, - "source": "workflow_json_filename = \"pyiron_arithmetic.json\"", - "id": "77135b0c61898507" + "id": "77135b0c61898507", + "metadata": {}, + "outputs": [], + "source": [ + "workflow_json_filename = \"pyiron_arithmetic.json\"" + ] }, { - "id": "07598344-0f75-433b-8902-bea21a42088c", "cell_type": "code", - "source": "get_sum = job(_get_sum)\nget_prod_and_div = job(_get_prod_and_div, output_key_lst=[\"prod\", \"div\"])", + "execution_count": null, + "id": "07598344-0f75-433b-8902-bea21a42088c", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 5 + "source": [ + "get_sum = job(_get_sum)\n", + "get_prod_and_div = job(_get_prod_and_div, output_key_lst=[\"prod\", \"div\"])\n", + "get_square = job(_get_square)" + ] }, { - "id": "2b88a30a-e26b-4802-89b7-79ca08cc0af9", "cell_type": "code", - "source": "obj = get_prod_and_div(x=1, y=2)", + "execution_count": null, + "id": "2b88a30a-e26b-4802-89b7-79ca08cc0af9", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 6 + 
"source": [ + "prod_and_div = get_prod_and_div(x=1, y=2)" + ] }, { - "id": "a5e5ca63-2906-47c9-bac6-adebf8643cba", "cell_type": "code", - "source": "w = get_sum(x=obj.output.prod, y=obj.output.div)", + "execution_count": null, + "id": "a5e5ca63-2906-47c9-bac6-adebf8643cba", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 7 + "source": [ + "tmp_sum = get_sum(x=prod_and_div.output.prod, y=prod_and_div.output.div)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eae321f1", + "metadata": {}, + "outputs": [], + "source": [ + "result = get_square(x=tmp_sum)" + ] }, { - "id": "e464da97-16a1-4772-9a07-0a47f152781d", "cell_type": "code", - "source": "write_workflow_json(delayed_object=w, file_name=workflow_json_filename)", + "execution_count": null, + "id": "e464da97-16a1-4772-9a07-0a47f152781d", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 8 + "source": [ + "write_workflow_json(delayed_object=result, file_name=workflow_json_filename)" + ] }, { - "id": "bca646b2-0a9a-4271-966a-e5903a8c9031", "cell_type": "code", - "source": "!cat {workflow_json_filename}", + "execution_count": 9, + "id": "bca646b2-0a9a-4271-966a-e5903a8c9031", "metadata": { "trusted": true }, @@ -125,74 +135,93 @@ { "name": "stdout", "output_type": "stream", - "text": "{\"nodes\": [{\"id\": 0, \"function\": \"arithmetic_workflow.get_sum\"}, {\"id\": 1, \"function\": \"arithmetic_workflow.get_prod_and_div\"}, {\"id\": 2, \"value\": 1}, {\"id\": 3, \"value\": 2}], \"edges\": [{\"target\": 0, \"targetPort\": \"x\", \"source\": 1, \"sourcePort\": \"prod\"}, {\"target\": 1, \"targetPort\": \"x\", \"source\": 2, \"sourcePort\": null}, {\"target\": 1, \"targetPort\": \"y\", \"source\": 3, \"sourcePort\": null}, {\"target\": 0, \"targetPort\": \"y\", \"source\": 1, \"sourcePort\": \"div\"}]}" + "text": [ + "{\"nodes\": [{\"id\": 0, \"function\": \"arithmetic_workflow.get_sum\"}, {\"id\": 1, \"function\": \"arithmetic_workflow.get_prod_and_div\"}, 
{\"id\": 2, \"value\": 1}, {\"id\": 3, \"value\": 2}], \"edges\": [{\"target\": 0, \"targetPort\": \"x\", \"source\": 1, \"sourcePort\": \"prod\"}, {\"target\": 1, \"targetPort\": \"x\", \"source\": 2, \"sourcePort\": null}, {\"target\": 1, \"targetPort\": \"y\", \"source\": 3, \"sourcePort\": null}, {\"target\": 0, \"targetPort\": \"y\", \"source\": 1, \"sourcePort\": \"div\"}]}" + ] } ], - "execution_count": 9 + "source": [ + "!cat {workflow_json_filename}" + ] }, { - "id": "a4c0faaf-e30d-4ded-8e9f-57f97f51b14c", "cell_type": "markdown", - "source": "## Load Workflow with aiida", - "metadata": {} + "id": "a4c0faaf-e30d-4ded-8e9f-57f97f51b14c", + "metadata": {}, + "source": [ + "## Load Workflow with aiida" + ] }, { - "id": "2ecc229f-daa1-49b9-9279-a6b5ae1aa4f2", "cell_type": "code", - "source": "from aiida import load_profile\n\nload_profile()", + "execution_count": 10, + "id": "2ecc229f-daa1-49b9-9279-a6b5ae1aa4f2", "metadata": { "trusted": true }, "outputs": [ { - "execution_count": 10, - "output_type": "execute_result", "data": { - "text/plain": "Profile" + "text/plain": [ + "Profile" + ] }, - "metadata": {} + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" } ], - "execution_count": 10 + "source": [ + "from aiida import load_profile\n", + "\n", + "load_profile()" + ] }, { - "id": "68a56b32-9f99-43d7-aaee-0c1cd9522681", "cell_type": "code", - "source": "from python_workflow_definition.aiida import load_workflow_json", + "execution_count": 11, + "id": "68a56b32-9f99-43d7-aaee-0c1cd9522681", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 11 + "source": [ + "from python_workflow_definition.aiida import load_workflow_json" + ] }, { - "id": "8f2a621d-b533-4ddd-8bcd-c22db2f922ec", "cell_type": "code", - "source": "wg = load_workflow_json(file_name=workflow_json_filename)\nwg", + "execution_count": 12, + "id": "8f2a621d-b533-4ddd-8bcd-c22db2f922ec", "metadata": { "trusted": true }, "outputs": [ { - "execution_count": 
12, - "output_type": "execute_result", "data": { - "text/plain": "NodeGraphWidget(settings={'minimap': True}, style={'width': '90%', 'height': '600px'}, value={'name': 'WorkGra…", "application/vnd.jupyter.widget-view+json": { + "model_id": "cb6aafce370b43bb9904c64f03c93cc0", "version_major": 2, - "version_minor": 1, - "model_id": "cb6aafce370b43bb9904c64f03c93cc0" - } + "version_minor": 1 + }, + "text/plain": [ + "NodeGraphWidget(settings={'minimap': True}, style={'width': '90%', 'height': '600px'}, value={'name': 'WorkGra…" + ] }, - "metadata": {} + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" } ], - "execution_count": 12 + "source": [ + "wg = load_workflow_json(file_name=workflow_json_filename)\n", + "wg" + ] }, { - "id": "cf80267d-c2b0-4236-bf1d-a57596985fc1", "cell_type": "code", - "source": "\nwg.run()", + "execution_count": 13, + "id": "cf80267d-c2b0-4236-bf1d-a57596985fc1", "metadata": { "trusted": true }, @@ -200,21 +229,33 @@ { "name": "stderr", "output_type": "stream", - "text": "04/23/2025 09:45:47 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|continue_workgraph]: tasks ready to run: get_prod_and_div2\n04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|update_task_state]: Task: get_prod_and_div2, type: PyFunction, finished.\n04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|continue_workgraph]: tasks ready to run: get_sum1\n04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|update_task_state]: Task: get_sum1, type: PyFunction, finished.\n04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|continue_workgraph]: tasks ready to run: \n04/23/2025 09:45:48 AM <4032> 
aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|finalize]: Finalize workgraph.\n" + "text": [ + "04/23/2025 09:45:47 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|continue_workgraph]: tasks ready to run: get_prod_and_div2\n", + "04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|update_task_state]: Task: get_prod_and_div2, type: PyFunction, finished.\n", + "04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|continue_workgraph]: tasks ready to run: get_sum1\n", + "04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|update_task_state]: Task: get_sum1, type: PyFunction, finished.\n", + "04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|continue_workgraph]: tasks ready to run: \n", + "04/23/2025 09:45:48 AM <4032> aiida.orm.nodes.process.workflow.workchain.WorkChainNode: [REPORT] [138|WorkGraphEngine|finalize]: Finalize workgraph.\n" + ] } ], - "execution_count": 13 + "source": [ + "\n", + "wg.run()" + ] }, { - "id": "0c3503e1-0a32-40e1-845d-3fd9ec3c4c19", "cell_type": "markdown", - "source": "## Load Workflow with jobflow", - "metadata": {} + "id": "0c3503e1-0a32-40e1-845d-3fd9ec3c4c19", + "metadata": {}, + "source": [ + "## Load Workflow with jobflow" + ] }, { - "id": "4abb0481-8e38-479d-ae61-6c46d091653e", "cell_type": "code", - "source": "from python_workflow_definition.jobflow import load_workflow_json", + "execution_count": 14, + "id": "4abb0481-8e38-479d-ae61-6c46d091653e", "metadata": { "trusted": true }, @@ -222,35 +263,46 @@ { "name": "stderr", "output_type": "stream", - "text": "/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/pkey.py:82: CryptographyDeprecationWarning: TripleDES has been 
moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n \"cipher\": algorithms.TripleDES,\n/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/transport.py:253: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n \"class\": algorithms.TripleDES,\n" + "text": [ + "/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/pkey.py:82: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n", + " \"cipher\": algorithms.TripleDES,\n", + "/srv/conda/envs/notebook/lib/python3.12/site-packages/paramiko/transport.py:253: CryptographyDeprecationWarning: TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.\n", + " \"class\": algorithms.TripleDES,\n" + ] } ], - "execution_count": 14 + "source": [ + "from python_workflow_definition.jobflow import load_workflow_json" + ] }, { - "id": "8253dd7c283bf3f7", "cell_type": "code", - "source": "from jobflow.managers.local import run_locally", + "execution_count": 15, + "id": "8253dd7c283bf3f7", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 15 + "source": [ + "from jobflow.managers.local import run_locally" + ] }, { - "id": "4b45e83b-945f-48c5-8e20-9df0ce0a14a1", "cell_type": "code", - "source": "flow = load_workflow_json(file_name=workflow_json_filename)", + "execution_count": 16, + "id": "4b45e83b-945f-48c5-8e20-9df0ce0a14a1", "metadata": { "trusted": true }, "outputs": [], - "execution_count": 16 + "source": [ + "flow = load_workflow_json(file_name=workflow_json_filename)" + ] }, { - 
"id": "8665c39c-220c-4982-b738-c31f6460530f", "cell_type": "code", - "source": "result = run_locally(flow)\nresult", + "execution_count": 17, + "id": "8665c39c-220c-4982-b738-c31f6460530f", "metadata": { "trusted": true }, @@ -258,28 +310,62 @@ { "name": "stdout", "output_type": "stream", - "text": "2025-04-23 09:45:49,174 INFO Started executing jobs locally\n2025-04-23 09:45:49,540 INFO Starting job - get_prod_and_div (77d76447-21f8-4ad8-b2b5-bef7263ef5b9)\n2025-04-23 09:45:49,543 INFO Finished job - get_prod_and_div (77d76447-21f8-4ad8-b2b5-bef7263ef5b9)\n2025-04-23 09:45:49,544 INFO Starting job - get_sum (e61e0bc0-bddd-4ac9-93f0-cf5f064e17b0)\n2025-04-23 09:45:49,546 INFO Finished job - get_sum (e61e0bc0-bddd-4ac9-93f0-cf5f064e17b0)\n2025-04-23 09:45:49,546 INFO Finished executing jobs locally\n" + "text": [ + "2025-04-23 09:45:49,174 INFO Started executing jobs locally\n", + "2025-04-23 09:45:49,540 INFO Starting job - get_prod_and_div (77d76447-21f8-4ad8-b2b5-bef7263ef5b9)\n", + "2025-04-23 09:45:49,543 INFO Finished job - get_prod_and_div (77d76447-21f8-4ad8-b2b5-bef7263ef5b9)\n", + "2025-04-23 09:45:49,544 INFO Starting job - get_sum (e61e0bc0-bddd-4ac9-93f0-cf5f064e17b0)\n", + "2025-04-23 09:45:49,546 INFO Finished job - get_sum (e61e0bc0-bddd-4ac9-93f0-cf5f064e17b0)\n", + "2025-04-23 09:45:49,546 INFO Finished executing jobs locally\n" + ] }, { - "execution_count": 17, - "output_type": "execute_result", "data": { - "text/plain": "{'77d76447-21f8-4ad8-b2b5-bef7263ef5b9': {1: Response(output={'prod': 2, 'div': 0.5}, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False, job_dir=PosixPath('/home/jovyan'))},\n 'e61e0bc0-bddd-4ac9-93f0-cf5f064e17b0': {1: Response(output=2.5, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False, job_dir=PosixPath('/home/jovyan'))}}" + "text/plain": [ + "{'77d76447-21f8-4ad8-b2b5-bef7263ef5b9': {1: Response(output={'prod': 2, 'div': 
0.5}, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False, job_dir=PosixPath('/home/jovyan'))},\n", + " 'e61e0bc0-bddd-4ac9-93f0-cf5f064e17b0': {1: Response(output=2.5, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False, job_dir=PosixPath('/home/jovyan'))}}" + ] }, - "metadata": {} + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" } ], - "execution_count": 17 + "source": [ + "result = run_locally(flow)\n", + "result" + ] }, { - "id": "cf76f305-24de-45a7-be8e-cfe45cd6458e", "cell_type": "code", - "source": "", + "execution_count": null, + "id": "cf76f305-24de-45a7-be8e-cfe45cd6458e", "metadata": { "trusted": true }, "outputs": [], - "execution_count": null + "source": [] } - ] + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/example_workflows/arithmetic/workflow.json b/example_workflows/arithmetic/workflow.json index 4ad5c86..4124a7d 100644 --- a/example_workflows/arithmetic/workflow.json +++ b/example_workflows/arithmetic/workflow.json @@ -3,15 +3,17 @@ "nodes": [ {"id": 0, "type": "function", "value": "workflow.get_prod_and_div"}, {"id": 1, "type": "function", "value": "workflow.get_sum"}, - {"id": 2, "type": "input", "value": 1, "name": "x"}, - {"id": 3, "type": "input", "value": 2, "name": "y"}, - {"id": 4, "type": "output", "name": "result"} + {"id": 2, "type": "function", "value": "workflow.get_square"}, + {"id": 3, "type": "input", "value": 1, "name": "x"}, + {"id": 4, "type": "input", "value": 2, "name": "y"}, + {"id": 5, "type": "output", "name": 
"result"} ], "edges": [ - {"target": 0, "targetPort": "x", "source": 2, "sourcePort": null}, - {"target": 0, "targetPort": "y", "source": 3, "sourcePort": null}, + {"target": 0, "targetPort": "x", "source": 3, "sourcePort": null}, + {"target": 0, "targetPort": "y", "source": 4, "sourcePort": null}, {"target": 1, "targetPort": "x", "source": 0, "sourcePort": "prod"}, {"target": 1, "targetPort": "y", "source": 0, "sourcePort": "div"}, - {"target": 4, "targetPort": null, "source": 1, "sourcePort": null} + {"target": 2, "targetPort": "x", "source": 1, "sourcePort": null}, + {"target": 5, "targetPort": null, "source": 2, "sourcePort": null} ] } diff --git a/example_workflows/arithmetic/workflow.py b/example_workflows/arithmetic/workflow.py index 3a12993..1a2e4c3 100644 --- a/example_workflows/arithmetic/workflow.py +++ b/example_workflows/arithmetic/workflow.py @@ -4,3 +4,7 @@ def get_prod_and_div(x, y): def get_sum(x, y): return x + y + + +def get_square(x): + return x ** 2