From 7b5448eb52b6bbe1d20e82d1b45f98e6d60a6164 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 26 Feb 2025 10:53:26 -0800 Subject: [PATCH] Replace "semantic" with "lexical" In all contexts pertaining to the semantics.py code. Signed-off-by: liamhuber --- notebooks/deepdive.ipynb | 82 +++++++------- notebooks/hpc_example.ipynb | 40 +++---- notebooks/quickstart.ipynb | 4 +- pyiron_workflow/channels.py | 2 +- .../mixin/{semantics.py => lexical.py} | 100 +++++++++--------- pyiron_workflow/node.py | 28 ++--- pyiron_workflow/nodes/composite.py | 4 +- pyiron_workflow/storage.py | 4 +- .../{test_semantics.py => test_lexical.py} | 88 +++++++-------- tests/unit/nodes/test_composite.py | 4 +- tests/unit/test_node.py | 2 +- 11 files changed, 178 insertions(+), 180 deletions(-) rename pyiron_workflow/mixin/{semantics.py => lexical.py} (79%) rename tests/unit/mixin/{test_semantics.py => test_lexical.py} (56%) diff --git a/notebooks/deepdive.ipynb b/notebooks/deepdive.ipynb index b67c1414e..2b8b6a0cc 100644 --- a/notebooks/deepdive.ipynb +++ b/notebooks/deepdive.ipynb @@ -1897,18 +1897,18 @@ { "data": { "text/plain": [ - "\u001b[0;31mType:\u001b[0m property\n", - "\u001b[0;31mString form:\u001b[0m \n", - "\u001b[0;31mSource:\u001b[0m \n", - "\u001b[0;31m# pwf.standard_nodes.If.emitting_channels.fget\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m\u001b[0;34m@\u001b[0m\u001b[0mproperty\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m\u001b[0;32mdef\u001b[0m \u001b[0memitting_channels\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mtuple\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mOutputSignal\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtruth\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalue\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0mNOT_DATA\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0memitting_channels\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtruth\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0memitting_channels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msignals\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutput\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0memitting_channels\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msignals\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutput\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m" + "\u001B[0;31mType:\u001B[0m property\n", + "\u001B[0;31mString form:\u001B[0m \n", + "\u001B[0;31mSource:\u001B[0m \n", + "\u001B[0;31m# pwf.standard_nodes.If.emitting_channels.fget\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m\u001B[0;34m@\u001B[0m\u001B[0mproperty\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m\u001B[0;32mdef\u001B[0m \u001B[0memitting_channels\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m)\u001B[0m \u001B[0;34m->\u001B[0m \u001B[0mtuple\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0mOutputSignal\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32mif\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0moutputs\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mtruth\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mvalue\u001B[0m \u001B[0;32mis\u001B[0m \u001B[0mNOT_DATA\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32mreturn\u001B[0m \u001B[0msuper\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0memitting_channels\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32melif\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0moutputs\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mtruth\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mvalue\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32mreturn\u001B[0m \u001B[0;34m(\u001B[0m\u001B[0;34m*\u001B[0m\u001B[0msuper\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0memitting_channels\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0msignals\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0moutput\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mtrue\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32melse\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32mreturn\u001B[0m \u001B[0;34m(\u001B[0m\u001B[0;34m*\u001B[0m\u001B[0msuper\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0memitting_channels\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0msignals\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0moutput\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mfalse\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m" ] }, "metadata": {}, @@ -2634,34 +2634,34 @@ { "data": { "text/plain": [ - "\u001b[0;31mSignature:\u001b[0m \u001b[0mRunnable\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_parse_executor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexecutor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0;34m'StdLibExecutor'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mSource:\u001b[0m \n", - " \u001b[0;34m@\u001b[0m\u001b[0mstaticmethod\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_parse_executor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexecutor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mStdLibExecutor\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;34m\"\"\"\u001b[0m\n", - "\u001b[0;34m We may want to allow users to specify how to build an 
executor rather than\u001b[0m\n", - "\u001b[0;34m actually providing an executor instance -- so here we can interpret these.\u001b[0m\n", - "\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m NOTE:\u001b[0m\n", - "\u001b[0;34m `concurrent.futures.Executor` _won't_ actually work, because we need\u001b[0m\n", - "\u001b[0;34m stuff with :mod:`cloudpickle` support. We're leaning on this for a guaranteed\u001b[0m\n", - "\u001b[0;34m interface (has `submit` and returns a `Future`), and leaving it to the user\u001b[0m\n", - "\u001b[0;34m to provide an executor that will actually work!!!\u001b[0m\n", - "\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m NOTE:\u001b[0m\n", - "\u001b[0;34m If, in the future, this parser is extended to instantiate new executors from\u001b[0m\n", - "\u001b[0;34m instructions, these new instances may not be caught by the\u001b[0m\n", - "\u001b[0;34m `executor_shutdown` method. This will require some re-engineering to make\u001b[0m\n", - "\u001b[0;34m sure we don't have dangling executors.\u001b[0m\n", - "\u001b[0;34m \"\"\"\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexecutor\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mStdLibExecutor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mexecutor\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mNotImplementedError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;34mf\"Expected an instance of {StdLibExecutor}, but got {executor}.\"\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mFile:\u001b[0m ~/work/pyiron/pyiron_workflow/pyiron_workflow/mixin/run.py\n", - "\u001b[0;31mType:\u001b[0m function" + "\u001B[0;31mSignature:\u001B[0m \u001B[0mRunnable\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0m_parse_executor\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mexecutor\u001B[0m\u001B[0;34m)\u001B[0m \u001B[0;34m->\u001B[0m \u001B[0;34m'StdLibExecutor'\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n", + "\u001B[0;31mSource:\u001B[0m \n", + " \u001B[0;34m@\u001B[0m\u001B[0mstaticmethod\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32mdef\u001B[0m \u001B[0m_parse_executor\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mexecutor\u001B[0m\u001B[0;34m)\u001B[0m \u001B[0;34m->\u001B[0m \u001B[0mStdLibExecutor\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;34m\"\"\"\u001B[0m\n", + "\u001B[0;34m We may want to allow users to specify how to build an executor rather than\u001B[0m\n", + "\u001B[0;34m actually providing an executor instance -- so here we can interpret these.\u001B[0m\n", + "\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m NOTE:\u001B[0m\n", + "\u001B[0;34m `concurrent.futures.Executor` _won't_ actually work, because we need\u001B[0m\n", + "\u001B[0;34m stuff with :mod:`cloudpickle` support. 
We're leaning on this for a guaranteed\u001B[0m\n", + "\u001B[0;34m interface (has `submit` and returns a `Future`), and leaving it to the user\u001B[0m\n", + "\u001B[0;34m to provide an executor that will actually work!!!\u001B[0m\n", + "\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m NOTE:\u001B[0m\n", + "\u001B[0;34m If, in the future, this parser is extended to instantiate new executors from\u001B[0m\n", + "\u001B[0;34m instructions, these new instances may not be caught by the\u001B[0m\n", + "\u001B[0;34m `executor_shutdown` method. This will require some re-engineering to make\u001B[0m\n", + "\u001B[0;34m sure we don't have dangling executors.\u001B[0m\n", + "\u001B[0;34m \"\"\"\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32mif\u001B[0m \u001B[0misinstance\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mexecutor\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mStdLibExecutor\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32mreturn\u001B[0m \u001B[0mexecutor\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32melse\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;32mraise\u001B[0m \u001B[0mNotImplementedError\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;34mf\"Expected an instance of {StdLibExecutor}, but got {executor}.\"\u001B[0m\u001B[0;34m\u001B[0m\n", + "\u001B[0;34m\u001B[0m \u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n", + "\u001B[0;31mFile:\u001B[0m ~/work/pyiron/pyiron_workflow/pyiron_workflow/mixin/run.py\n", + "\u001B[0;31mType:\u001B[0m function" ] }, "metadata": {}, @@ -4112,7 +4112,7 @@ "source": [ "## Storage interfaces\n", "\n", - "We saw in `quickstart.ipynb` how to save and load nodes to/from their default location (based on the current working directory and their semantic label), and how to use checkpointing and autoloading with the built-in `\"pickle\"` storage back end.\n", + "We saw in `quickstart.ipynb` how to save and load nodes to/from their default location (based on the current working directory and their lexical label), and how to use checkpointing and autoloading with the built-in `\"pickle\"` storage back end.\n", "\n", "We can also save and load using an explicit storage interface instance. 
This allows us to save and load at non-standard locations:" ] diff --git a/notebooks/hpc_example.ipynb b/notebooks/hpc_example.ipynb index 1a34b0676..713db3886 100644 --- a/notebooks/hpc_example.ipynb +++ b/notebooks/hpc_example.ipynb @@ -75,7 +75,7 @@ " \n", " if job_name is None:\n", " job_name = node.full_label \n", - " job_name = job_name.replace(node.semantic_delimiter, \"_\")\n", + " job_name = job_name.replace(node.lexical_delimiter, \"_\")\n", " job_name = \"pwf\" + job_name\n", " \n", " script_content = f\"\"\"#!/bin/bash\n", @@ -1386,25 +1386,25 @@ "evalue": "", "output_type": "error", "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/nodes/composite.py:168\u001b[0m, in \u001b[0;36mComposite._run_while_children_or_signals_exist\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 167\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 168\u001b[0m firing, receiving \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msignal_queue\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpop\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 169\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n", - "\u001b[0;31mIndexError\u001b[0m: pop from empty list", + "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m", + "\u001B[0;31mIndexError\u001B[0m Traceback (most recent call last)", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/nodes/composite.py:168\u001B[0m, in \u001B[0;36mComposite._run_while_children_or_signals_exist\u001B[0;34m(self)\u001B[0m\n\u001B[1;32m 167\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m--> 168\u001B[0m firing, receiving \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43msignal_queue\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mpop\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m0\u001B[39;49m\u001B[43m)\u001B[49m\n\u001B[1;32m 169\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n", + "\u001B[0;31mIndexError\u001B[0m: pop from empty list", "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[16], line 10\u001b[0m\n\u001b[1;32m 8\u001b[0m wf\u001b[38;5;241m.\u001b[39msleep\u001b[38;5;241m.\u001b[39m_serialize_result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[1;32m 9\u001b[0m wf\u001b[38;5;241m.\u001b[39msleep\u001b[38;5;241m.\u001b[39mexecutor \u001b[38;5;241m=\u001b[39m exe\n\u001b[0;32m---> 10\u001b[0m \u001b[43mwf\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/node.py:758\u001b[0m, in \u001b[0;36mNode.__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 753\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 754\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 755\u001b[0m \u001b[38;5;124;03m A shortcut for 
:meth:`pull` that automatically runs the entire set of upstream data\u001b[39;00m\n\u001b[1;32m 756\u001b[0m \u001b[38;5;124;03m dependencies all the way to the parent-most graph object.\u001b[39;00m\n\u001b[1;32m 757\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 758\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpull\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_parent_trees_too\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/workflow.py:378\u001b[0m, in \u001b[0;36mWorkflow.pull\u001b[0;34m(self, run_parent_trees_too, **kwargs)\u001b[0m\n\u001b[1;32m 376\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mpull\u001b[39m(\u001b[38;5;28mself\u001b[39m, run_parent_trees_too\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 377\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Workflows are a parent-most object, so this simply runs without pulling.\"\"\"\u001b[39;00m\n\u001b[0;32m--> 378\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/workflow.py:367\u001b[0m, in \u001b[0;36mWorkflow.run\u001b[0;34m(self, check_readiness, **kwargs)\u001b[0m\n\u001b[1;32m 358\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mrun\u001b[39m(\n\u001b[1;32m 359\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 360\u001b[0m check_readiness: \u001b[38;5;28mbool\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 364\u001b[0m \u001b[38;5;66;03m# worry about running their data trees first, fetching their input, nor firing\u001b[39;00m\n\u001b[1;32m 365\u001b[0m \u001b[38;5;66;03m# their `ran` signal, hence the change in signature from Node.run\u001b[39;00m\n\u001b[0;32m--> 367\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 368\u001b[0m \u001b[43m \u001b[49m\u001b[43mrun_data_tree\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 369\u001b[0m \u001b[43m \u001b[49m\u001b[43mrun_parent_trees_too\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 370\u001b[0m \u001b[43m \u001b[49m\u001b[43mfetch_input\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 371\u001b[0m \u001b[43m \u001b[49m\u001b[43mcheck_readiness\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcheck_readiness\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 372\u001b[0m \u001b[43m 
\u001b[49m\u001b[43memit_ran_signal\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 373\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 374\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/node.py:483\u001b[0m, in \u001b[0;36mNode.run\u001b[0;34m(self, run_data_tree, run_parent_trees_too, fetch_input, check_readiness, raise_run_exceptions, emit_ran_signal, *args, **kwargs)\u001b[0m\n\u001b[1;32m 477\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 478\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfull_label\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m is still waiting for a serialized result\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 479\u001b[0m )\n\u001b[1;32m 481\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mset_input_values(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m--> 483\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 484\u001b[0m \u001b[43m \u001b[49m\u001b[43mcheck_readiness\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcheck_readiness\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 485\u001b[0m \u001b[43m \u001b[49m\u001b[43mraise_run_exceptions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mraise_run_exceptions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 486\u001b[0m \u001b[43m \u001b[49m\u001b[43mbefore_run_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m{\u001b[49m\n\u001b[1;32m 487\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrun_data_tree\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_data_tree\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 488\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrun_parent_trees_too\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_parent_trees_too\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 489\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfetch_input\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mfetch_input\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 490\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43memit_ran_signal\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43memit_ran_signal\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 491\u001b[0m \u001b[43m \u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 492\u001b[0m \u001b[43m \u001b[49m\u001b[43mrun_finally_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m{\u001b[49m\n\u001b[1;32m 493\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43memit_ran_signal\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43memit_ran_signal\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 
494\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mraise_run_exceptions\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mraise_run_exceptions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 495\u001b[0m \u001b[43m \u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 496\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/mixin/run.py:146\u001b[0m, in \u001b[0;36mRunnable.run\u001b[0;34m(self, check_readiness, raise_run_exceptions, before_run_kwargs, run_kwargs, run_exception_kwargs, run_finally_kwargs, finish_run_kwargs)\u001b[0m\n\u001b[1;32m 141\u001b[0m executor \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 142\u001b[0m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mexecutor \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_parse_executor(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mexecutor)\n\u001b[1;32m 143\u001b[0m )\n\u001b[1;32m 145\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrunning \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m--> 146\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 147\u001b[0m \u001b[43m \u001b[49m\u001b[43mexecutor\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mexecutor\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 148\u001b[0m \u001b[43m \u001b[49m\u001b[43mraise_run_exceptions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mraise_run_exceptions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 149\u001b[0m \u001b[43m \u001b[49m\u001b[43mrun_exception_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_exception_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 150\u001b[0m \u001b[43m \u001b[49m\u001b[43mrun_finally_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_finally_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 151\u001b[0m \u001b[43m \u001b[49m\u001b[43mfinish_run_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfinish_run_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 152\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mrun_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 153\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/node.py:539\u001b[0m, in \u001b[0;36mNode._run\u001b[0;34m(self, executor, raise_run_exceptions, run_exception_kwargs, run_finally_kwargs, finish_run_kwargs)\u001b[0m\n\u001b[1;32m 537\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mparent \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 538\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mparent\u001b[38;5;241m.\u001b[39mregister_child_starting(\u001b[38;5;28mself\u001b[39m)\n\u001b[0;32m--> 539\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 540\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mexecutor\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mexecutor\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 541\u001b[0m \u001b[43m \u001b[49m\u001b[43mraise_run_exceptions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mraise_run_exceptions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 542\u001b[0m \u001b[43m \u001b[49m\u001b[43mrun_exception_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_exception_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 543\u001b[0m \u001b[43m \u001b[49m\u001b[43mrun_finally_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_finally_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 544\u001b[0m \u001b[43m \u001b[49m\u001b[43mfinish_run_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfinish_run_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 545\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/mixin/run.py:214\u001b[0m, in \u001b[0;36mRunnable._run\u001b[0;34m(self, executor, raise_run_exceptions, run_exception_kwargs, run_finally_kwargs, finish_run_kwargs, **kwargs)\u001b[0m\n\u001b[1;32m 212\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_run_finally(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mrun_finally_kwargs)\n\u001b[1;32m 213\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m raise_run_exceptions:\n\u001b[0;32m--> 214\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[1;32m 215\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 216\u001b[0m run_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/mixin/run.py:209\u001b[0m, in \u001b[0;36mRunnable._run\u001b[0;34m(self, executor, raise_run_exceptions, run_exception_kwargs, run_finally_kwargs, finish_run_kwargs, **kwargs)\u001b[0m\n\u001b[1;32m 207\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m executor \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 208\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 209\u001b[0m run_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mon_run\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mon_run_args\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mon_run_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 210\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mException\u001b[39;00m, \u001b[38;5;167;01mKeyboardInterrupt\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 211\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_run_exception(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mrun_exception_kwargs)\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/node.py:387\u001b[0m, in \u001b[0;36mNode.on_run\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 385\u001b[0m save_result: \u001b[38;5;28mbool\u001b[39m \u001b[38;5;241m=\u001b[39m args[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 386\u001b[0m args \u001b[38;5;241m=\u001b[39m args[\u001b[38;5;241m1\u001b[39m:]\n\u001b[0;32m--> 387\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_on_run\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 388\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m save_result:\n\u001b[1;32m 389\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_temporary_result_pickle(result)\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/nodes/composite.py:160\u001b[0m, in \u001b[0;36mComposite._on_run\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 157\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m node \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstarting_nodes:\n\u001b[1;32m 158\u001b[0m node\u001b[38;5;241m.\u001b[39mrun()\n\u001b[0;32m--> 160\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run_while_children_or_signals_exist\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 162\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\n", - "File \u001b[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/nodes/composite.py:175\u001b[0m, in \u001b[0;36mComposite._run_while_children_or_signals_exist\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 172\u001b[0m errors[receiving\u001b[38;5;241m.\u001b[39mfull_label] \u001b[38;5;241m=\u001b[39m e\n\u001b[1;32m 173\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mIndexError\u001b[39;00m:\n\u001b[1;32m 174\u001b[0m \u001b[38;5;66;03m# The signal queue is empty, but there is still someone running...\u001b[39;00m\n\u001b[0;32m--> 175\u001b[0m \u001b[43msleep\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_child_sleep_interval\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 177\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(errors) \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m1\u001b[39m:\n\u001b[1;32m 178\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m FailedChildError(\n\u001b[1;32m 179\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfull_label\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m encountered error in child: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00merrors\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 180\u001b[0m ) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mnext\u001b[39;00m(\u001b[38;5;28miter\u001b[39m(errors\u001b[38;5;241m.\u001b[39mvalues()))\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + "\u001B[0;31mKeyboardInterrupt\u001B[0m Traceback (most recent call last)", + "Cell \u001B[0;32mIn[16], line 10\u001B[0m\n\u001B[1;32m 8\u001B[0m wf\u001B[38;5;241m.\u001B[39msleep\u001B[38;5;241m.\u001B[39m_serialize_result \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[1;32m 9\u001B[0m wf\u001B[38;5;241m.\u001B[39msleep\u001B[38;5;241m.\u001B[39mexecutor \u001B[38;5;241m=\u001B[39m exe\n\u001B[0;32m---> 10\u001B[0m \u001B[43mwf\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/node.py:758\u001B[0m, in \u001B[0;36mNode.__call__\u001B[0;34m(self, *args, **kwargs)\u001B[0m\n\u001B[1;32m 753\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21m__call__\u001B[39m(\u001B[38;5;28mself\u001B[39m, \u001B[38;5;241m*\u001B[39margs, \u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mkwargs) \u001B[38;5;241m-\u001B[39m\u001B[38;5;241m>\u001B[39m 
\u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[1;32m 754\u001B[0m \u001B[38;5;250m \u001B[39m\u001B[38;5;124;03m\"\"\"\u001B[39;00m\n\u001B[1;32m 755\u001B[0m \u001B[38;5;124;03m A shortcut for :meth:`pull` that automatically runs the entire set of upstream data\u001B[39;00m\n\u001B[1;32m 756\u001B[0m \u001B[38;5;124;03m dependencies all the way to the parent-most graph object.\u001B[39;00m\n\u001B[1;32m 757\u001B[0m \u001B[38;5;124;03m \"\"\"\u001B[39;00m\n\u001B[0;32m--> 758\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mpull\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mrun_parent_trees_too\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/workflow.py:378\u001B[0m, in \u001B[0;36mWorkflow.pull\u001B[0;34m(self, run_parent_trees_too, **kwargs)\u001B[0m\n\u001B[1;32m 376\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21mpull\u001B[39m(\u001B[38;5;28mself\u001B[39m, run_parent_trees_too\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mFalse\u001B[39;00m, \u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mkwargs):\n\u001B[1;32m 377\u001B[0m \u001B[38;5;250m \u001B[39m\u001B[38;5;124;03m\"\"\"Workflows are a parent-most object, so this simply runs without pulling.\"\"\"\u001B[39;00m\n\u001B[0;32m--> 378\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mrun\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/workflow.py:367\u001B[0m, in \u001B[0;36mWorkflow.run\u001B[0;34m(self, check_readiness, **kwargs)\u001B[0m\n\u001B[1;32m 358\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21mrun\u001B[39m(\n\u001B[1;32m 359\u001B[0m \u001B[38;5;28mself\u001B[39m,\n\u001B[1;32m 360\u001B[0m check_readiness: \u001B[38;5;28mbool\u001B[39m \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;01mTrue\u001B[39;00m,\n\u001B[0;32m (...)\u001B[0m\n\u001B[1;32m 364\u001B[0m \u001B[38;5;66;03m# worry about running their data trees first, fetching their input, nor firing\u001B[39;00m\n\u001B[1;32m 365\u001B[0m \u001B[38;5;66;03m# their `ran` signal, hence the change in signature from Node.run\u001B[39;00m\n\u001B[0;32m--> 367\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mrun\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m 368\u001B[0m \u001B[43m \u001B[49m\u001B[43mrun_data_tree\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mFalse\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m 369\u001B[0m \u001B[43m \u001B[49m\u001B[43mrun_parent_trees_too\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mFalse\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m 370\u001B[0m \u001B[43m \u001B[49m\u001B[43mfetch_input\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mFalse\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m 371\u001B[0m \u001B[43m 
\u001B[49m\u001B[43mcheck_readiness\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mcheck_readiness\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 372\u001B[0m \u001B[43m \u001B[49m\u001B[43memit_ran_signal\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mFalse\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m 373\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 374\u001B[0m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/node.py:483\u001B[0m, in \u001B[0;36mNode.run\u001B[0;34m(self, run_data_tree, run_parent_trees_too, fetch_input, check_readiness, raise_run_exceptions, emit_ran_signal, *args, **kwargs)\u001B[0m\n\u001B[1;32m 477\u001B[0m \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;167;01mValueError\u001B[39;00m(\n\u001B[1;32m 478\u001B[0m \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;132;01m{\u001B[39;00m\u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mfull_label\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m is still waiting for a serialized result\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m 479\u001B[0m )\n\u001B[1;32m 481\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mset_input_values(\u001B[38;5;241m*\u001B[39margs, \u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mkwargs)\n\u001B[0;32m--> 483\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mrun\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m 484\u001B[0m \u001B[43m \u001B[49m\u001B[43mcheck_readiness\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mcheck_readiness\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 485\u001B[0m \u001B[43m \u001B[49m\u001B[43mraise_run_exceptions\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mraise_run_exceptions\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 486\u001B[0m \u001B[43m \u001B[49m\u001B[43mbefore_run_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43m{\u001B[49m\n\u001B[1;32m 487\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43mrun_data_tree\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m:\u001B[49m\u001B[43m \u001B[49m\u001B[43mrun_data_tree\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 488\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43mrun_parent_trees_too\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m:\u001B[49m\u001B[43m \u001B[49m\u001B[43mrun_parent_trees_too\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 489\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43mfetch_input\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m:\u001B[49m\u001B[43m \u001B[49m\u001B[43mfetch_input\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 490\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43memit_ran_signal\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m:\u001B[49m\u001B[43m \u001B[49m\u001B[43memit_ran_signal\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 491\u001B[0m \u001B[43m \u001B[49m\u001B[43m}\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 492\u001B[0m \u001B[43m \u001B[49m\u001B[43mrun_finally_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43m{\u001B[49m\n\u001B[1;32m 493\u001B[0m \u001B[43m 
\u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43memit_ran_signal\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m:\u001B[49m\u001B[43m \u001B[49m\u001B[43memit_ran_signal\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 494\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43mraise_run_exceptions\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m:\u001B[49m\u001B[43m \u001B[49m\u001B[43mraise_run_exceptions\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 495\u001B[0m \u001B[43m \u001B[49m\u001B[43m}\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 496\u001B[0m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/mixin/run.py:146\u001B[0m, in \u001B[0;36mRunnable.run\u001B[0;34m(self, check_readiness, raise_run_exceptions, before_run_kwargs, run_kwargs, run_exception_kwargs, run_finally_kwargs, finish_run_kwargs)\u001B[0m\n\u001B[1;32m 141\u001B[0m executor \u001B[38;5;241m=\u001B[39m (\n\u001B[1;32m 142\u001B[0m \u001B[38;5;28;01mNone\u001B[39;00m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mexecutor \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m \u001B[38;5;28;01melse\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_parse_executor(\u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mexecutor)\n\u001B[1;32m 143\u001B[0m )\n\u001B[1;32m 145\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mrunning \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[0;32m--> 146\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_run\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m 147\u001B[0m \u001B[43m \u001B[49m\u001B[43mexecutor\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mexecutor\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 148\u001B[0m \u001B[43m \u001B[49m\u001B[43mraise_run_exceptions\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mraise_run_exceptions\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 149\u001B[0m \u001B[43m \u001B[49m\u001B[43mrun_exception_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrun_exception_kwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 150\u001B[0m \u001B[43m \u001B[49m\u001B[43mrun_finally_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrun_finally_kwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 151\u001B[0m \u001B[43m \u001B[49m\u001B[43mfinish_run_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mfinish_run_kwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 152\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mrun_kwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 153\u001B[0m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/node.py:539\u001B[0m, in \u001B[0;36mNode._run\u001B[0;34m(self, executor, raise_run_exceptions, run_exception_kwargs, run_finally_kwargs, finish_run_kwargs)\u001B[0m\n\u001B[1;32m 537\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mparent \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[1;32m 538\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mparent\u001B[38;5;241m.\u001B[39mregister_child_starting(\u001B[38;5;28mself\u001B[39m)\n\u001B[0;32m--> 
539\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_run\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m 540\u001B[0m \u001B[43m \u001B[49m\u001B[43mexecutor\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mexecutor\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 541\u001B[0m \u001B[43m \u001B[49m\u001B[43mraise_run_exceptions\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mraise_run_exceptions\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 542\u001B[0m \u001B[43m \u001B[49m\u001B[43mrun_exception_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrun_exception_kwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 543\u001B[0m \u001B[43m \u001B[49m\u001B[43mrun_finally_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrun_finally_kwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 544\u001B[0m \u001B[43m \u001B[49m\u001B[43mfinish_run_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mfinish_run_kwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 545\u001B[0m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/mixin/run.py:214\u001B[0m, in \u001B[0;36mRunnable._run\u001B[0;34m(self, executor, raise_run_exceptions, run_exception_kwargs, run_finally_kwargs, finish_run_kwargs, **kwargs)\u001B[0m\n\u001B[1;32m 212\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_run_finally(\u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mrun_finally_kwargs)\n\u001B[1;32m 213\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m raise_run_exceptions:\n\u001B[0;32m--> 214\u001B[0m \u001B[38;5;28;01mraise\u001B[39;00m e\n\u001B[1;32m 215\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m 216\u001B[0m run_output \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;01mNone\u001B[39;00m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/mixin/run.py:209\u001B[0m, in \u001B[0;36mRunnable._run\u001B[0;34m(self, executor, raise_run_exceptions, run_exception_kwargs, run_finally_kwargs, finish_run_kwargs, **kwargs)\u001B[0m\n\u001B[1;32m 207\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m executor \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[1;32m 208\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m--> 209\u001B[0m run_output \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mon_run\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mon_run_args\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mon_run_kwargs\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 210\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m (\u001B[38;5;167;01mException\u001B[39;00m, \u001B[38;5;167;01mKeyboardInterrupt\u001B[39;00m) \u001B[38;5;28;01mas\u001B[39;00m e:\n\u001B[1;32m 211\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_run_exception(\u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mrun_exception_kwargs)\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/node.py:387\u001B[0m, in \u001B[0;36mNode.on_run\u001B[0;34m(self, *args, **kwargs)\u001B[0m\n\u001B[1;32m 385\u001B[0m save_result: \u001B[38;5;28mbool\u001B[39m \u001B[38;5;241m=\u001B[39m args[\u001B[38;5;241m0\u001B[39m]\n\u001B[1;32m 386\u001B[0m args \u001B[38;5;241m=\u001B[39m args[\u001B[38;5;241m1\u001B[39m:]\n\u001B[0;32m--> 
387\u001B[0m result \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_on_run\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 388\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m save_result:\n\u001B[1;32m 389\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_temporary_result_pickle(result)\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/nodes/composite.py:160\u001B[0m, in \u001B[0;36mComposite._on_run\u001B[0;34m(self)\u001B[0m\n\u001B[1;32m 157\u001B[0m \u001B[38;5;28;01mfor\u001B[39;00m node \u001B[38;5;129;01min\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mstarting_nodes:\n\u001B[1;32m 158\u001B[0m node\u001B[38;5;241m.\u001B[39mrun()\n\u001B[0;32m--> 160\u001B[0m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_run_while_children_or_signals_exist\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 162\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m\n", + "File \u001B[0;32m~/greyhaven/pyiron_workflow/pyiron_workflow/nodes/composite.py:175\u001B[0m, in \u001B[0;36mComposite._run_while_children_or_signals_exist\u001B[0;34m(self)\u001B[0m\n\u001B[1;32m 172\u001B[0m errors[receiving\u001B[38;5;241m.\u001B[39mfull_label] \u001B[38;5;241m=\u001B[39m e\n\u001B[1;32m 173\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mIndexError\u001B[39;00m:\n\u001B[1;32m 174\u001B[0m \u001B[38;5;66;03m# The signal queue is empty, but there is still someone running...\u001B[39;00m\n\u001B[0;32m--> 175\u001B[0m \u001B[43msleep\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_child_sleep_interval\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 177\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mlen\u001B[39m(errors) \u001B[38;5;241m==\u001B[39m \u001B[38;5;241m1\u001B[39m:\n\u001B[1;32m 178\u001B[0m \u001B[38;5;28;01mraise\u001B[39;00m FailedChildError(\n\u001B[1;32m 179\u001B[0m \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;132;01m{\u001B[39;00m\u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mfull_label\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m encountered error in child: \u001B[39m\u001B[38;5;132;01m{\u001B[39;00merrors\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m 180\u001B[0m ) \u001B[38;5;28;01mfrom\u001B[39;00m \u001B[38;5;21;01mnext\u001B[39;00m(\u001B[38;5;28miter\u001B[39m(errors\u001B[38;5;241m.\u001B[39mvalues()))\n", + "\u001B[0;31mKeyboardInterrupt\u001B[0m: " ] } ], @@ -1928,7 +1928,7 @@ "id": "1e29f9d9-85e6-4081-93f7-1bd1ec72c6e2", "metadata": {}, "source": [ - "We can look in the filesystem along the remotely-executed node's semantic path to see when the `run_result.tmp` file is available, this is the cloudpickled result from the remote execution. 
(In a real case we'd probably be more conveniently looking at a list of jobs running on the queue to see if ours was finished, or leveraging some other syntactic shortcut to see if the job is done, but convenient UI tools around this functionality aren't done yet.)\n",
+    "We can look in the filesystem along the remotely-executed node's lexical path to see when the `run_result.tmp` file is available; this is the cloudpickled result from the remote execution. (In a real case we'd probably be more conveniently looking at a list of jobs running on the queue to see if ours was finished, or leveraging some other syntactic shortcut to see if the job is done, but convenient UI tools around this functionality aren't done yet.)\n",
     "\n",
     "Now we simply reset the failed status and run the workflow again:"
    ]
diff --git a/notebooks/quickstart.ipynb b/notebooks/quickstart.ipynb
index 474509e10..4f993437d 100644
--- a/notebooks/quickstart.ipynb
+++ b/notebooks/quickstart.ipynb
@@ -2105,9 +2105,7 @@
    "cell_type": "markdown",
    "id": "2075fbb9-36f6-4e2e-9ce6-73120b8dc963",
    "metadata": {},
-   "source": [
-    "This creates a new save-file at a canonical path based on your workflow's semantic labeling:"
-   ]
+   "source": "This creates a new save-file at a canonical path based on your workflow's lexical labeling:"
   },
   {
    "cell_type": "code",
diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py
index 75e5f3e3e..563ae5d96 100644
--- a/pyiron_workflow/channels.py
+++ b/pyiron_workflow/channels.py
@@ -112,7 +112,7 @@ def scoped_label(self) -> str:
 
     @property
     def full_label(self) -> str:
-        """A label combining the channel's usual label and its owner's semantic path"""
+        """A label combining the channel's usual label and its owner's lexical path"""
         return f"{self.owner.full_label}.{self.label}"
 
     def connect(self, *others: ConjugateType) -> None:
diff --git a/pyiron_workflow/mixin/semantics.py b/pyiron_workflow/mixin/lexical.py
similarity index 79%
rename from pyiron_workflow/mixin/semantics.py
rename to pyiron_workflow/mixin/lexical.py
index 0659fa75d..25993898d 100644
--- a/pyiron_workflow/mixin/semantics.py
+++ b/pyiron_workflow/mixin/lexical.py
@@ -1,15 +1,15 @@
 """
-Classes for "semantic" reasoning.
+Classes for "lexical" reasoning.
 
 The motivation here is to be able to provide the object with a unique identifier
-in the context of other semantic objects. Each object may have at most one parent,
-while semantic parents may have an arbitrary number of children, and each child's name
-must be unique in the scope of that parent. In this way, when semantic parents are also
-themselves semantic, we can build a path from the parent-most object to any child that
+in the context of other lexical objects. Each object may have at most one parent,
+while lexical parents may have an arbitrary number of children, and each child's name
+must be unique in the scope of that parent. In this way, when lexical parents are also
+themselves lexical, we can build a path from the parent-most object to any child that
 is completely unique. The typical filesystem on a computer is an excellent example
 and fulfills our requirements, the only reason we depart from it is so that we are
 free to have objects stored in different locations (possibly even on totally
-different drives or machines) belong to the same semantic group.
+different drives or machines) belong to the same lexical group.
 """
 
 from __future__ import annotations
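For orientation, everything renamed in this module hangs off one idea: a filesystem-like path built from nested labels. A minimal self-contained sketch of that idea (an illustrative toy for this note, not part of the patch; `ToyLexical` and its members are invented names that merely echo the renamed API):

class ToyLexical:
    """Toy stand-in for the Lexical mixin: one optional parent, '/'-joined labels."""

    lexical_delimiter = "/"

    def __init__(self, label, parent=None):
        self.label = label
        self.parent = parent  # At most one parent, as the docstring above requires

    @property
    def lexical_path(self):
        # Recurse up to the parent-most object, joining labels with the delimiter
        prefix = "" if self.parent is None else self.parent.lexical_path
        return prefix + self.lexical_delimiter + self.label


wf = ToyLexical("wf")
sleep = ToyLexical("sleep", parent=wf)
assert sleep.lexical_path == "/wf/sleep"  # Unique, filesystem-flavored identifier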
""" from __future__ import annotations @@ -24,18 +24,18 @@ from pyiron_workflow.logging import logger from pyiron_workflow.mixin.has_interface_mixins import HasLabel, UsesState -ParentType = TypeVar("ParentType", bound="SemanticParent") +ParentType = TypeVar("ParentType", bound="LexicalParent") -class Semantic(UsesState, HasLabel, Generic[ParentType], ABC): +class Lexical(UsesState, HasLabel, Generic[ParentType], ABC): """ - An object with a unique semantic path. + An object with a unique lexical path. - The semantic parent object (if any), and the parent-most object are both easily + The lexical parent object (if any), and the parent-most object are both easily accessible. """ - semantic_delimiter: ClassVar[str] = "/" + lexical_delimiter: ClassVar[str] = "/" def __init__( self, @@ -58,9 +58,9 @@ def parent_type(cls) -> type[ParentType]: def _check_label(self, new_label: str) -> None: super()._check_label(new_label) - if self.semantic_delimiter in new_label: + if self.lexical_delimiter in new_label: raise ValueError( - f"Semantic delimiter {self.semantic_delimiter} cannot be in new label " + f"Lexical delimiter {self.lexical_delimiter} cannot be in new label " f"{new_label}" ) @@ -101,7 +101,7 @@ def _set_parent(self, new_parent: ParentType | None): self._parent.add_child(self) @property - def semantic_path(self) -> str: + def lexical_path(self) -> str: """ The path of node labels from the graph root (parent-most node) down to this node. @@ -112,10 +112,10 @@ def semantic_path(self) -> str: elif self.parent is None and self.detached_parent_path is not None: prefix = self.detached_parent_path elif self.parent is not None and self.detached_parent_path is None: - if isinstance(self.parent, Semantic): - prefix = self.parent.semantic_path + if isinstance(self.parent, Lexical): + prefix = self.parent.lexical_path else: - prefix = self.semantic_delimiter + self.parent.label + prefix = self.lexical_delimiter + self.parent.label else: raise ValueError( f"The parent and detached path should not be able to take non-None " @@ -123,71 +123,71 @@ def semantic_path(self) -> str: f"{self.detached_parent_path}, respectively. Please raise an issue on " f"GitHub outlining how your reached this state." ) - return prefix + self.semantic_delimiter + self.label + return prefix + self.lexical_delimiter + self.label @property def detached_parent_path(self) -> str | None: """ - The get/set state cycle of :class:`Semantic` de-parents objects, but we may - still be interested in the semantic path -- e.g. if we `pickle` dump and load + The get/set state cycle of :class:`Lexical` de-parents objects, but we may + still be interested in the lexical path -- e.g. if we `pickle` dump and load the object we will lose parent information, but this will still hold what the path _was_ before the orphaning process. The detached path will get cleared if a new parent is set, but is otherwise - used as the root for the purposes of finding the semantic path. + used as the root for the purposes of finding the lexical path. """ return self._detached_parent_path @property def full_label(self) -> str: """ - A shortcut that combines the semantic path and label into a single string. + A shortcut that combines the lexical path and label into a single string. 
""" - return self.semantic_path + return self.lexical_path @property - def semantic_root(self) -> Semantic: - """The parent-most object in this semantic path; may be self.""" - if isinstance(self.parent, Semantic): - return self.parent.semantic_root + def lexical_root(self) -> Lexical: + """The parent-most object in this lexical path; may be self.""" + if isinstance(self.parent, Lexical): + return self.parent.lexical_root else: return self def as_path(self, root: Path | str | None = None) -> Path: """ - The semantic path as a :class:`pathlib.Path`, with a filesystem :param:`root` + The lexical path as a :class:`pathlib.Path`, with a filesystem :param:`root` (default is the current working directory). """ return (Path.cwd() if root is None else Path(root)).joinpath( - *self.semantic_path.split(self.semantic_delimiter) + *self.lexical_path.split(self.lexical_delimiter) ) def __getstate__(self): state = super().__getstate__() if self.parent is not None: - state["_detached_parent_path"] = self.parent.semantic_path + state["_detached_parent_path"] = self.parent.lexical_path state["_parent"] = None # Regarding removing parent from state: # Basically we want to avoid recursion during (de)serialization; when the # parent object is deserializing itself, _it_ should know who its children are # and inform them of this. # In the case the object gets passed to another process using __getstate__, - # this also avoids dragging our whole semantic parent graph along with us. + # this also avoids dragging our whole lexical parent graph along with us. return state class CyclicPathError(ValueError): """ - To be raised when adding a child would result in a cyclic semantic path. + To be raised when adding a child would result in a cyclic lexical path. """ -ChildType = TypeVar("ChildType", bound=Semantic) +ChildType = TypeVar("ChildType", bound=Lexical) -class SemanticParent(HasLabel, Generic[ChildType], ABC): +class LexicalParent(HasLabel, Generic[ChildType], ABC): """ - A labeled object with a collection of uniquely-named semantic children. + A labeled object with a collection of uniquely-named lexical children. Children should be added or removed via the :meth:`add_child` and :meth:`remove_child` methods and _not_ by direct manipulation of the @@ -198,7 +198,7 @@ class SemanticParent(HasLabel, Generic[ChildType], ABC): Iterating over the parent yields the children, and the length of the parent is the number of children. - When adding children or assigning parents, a check is performed on the semantic + When adding children or assigning parents, a check is performed on the lexical path to forbid cyclic paths. 
""" @@ -232,10 +232,10 @@ def child_labels(self) -> tuple[str]: def _check_label(self, new_label: str) -> None: super()._check_label(new_label) - if self.child_type().semantic_delimiter in new_label: + if self.child_type().lexical_delimiter in new_label: raise ValueError( - f"Child type ({self.child_type()}) semantic delimiter " - f"{self.child_type().semantic_delimiter} cannot be in new label " + f"Child type ({self.child_type()}) lexical delimiter " + f"{self.child_type().lexical_delimiter} cannot be in new label " f"{new_label}" ) @@ -316,7 +316,7 @@ def add_child( child.parent = self return child - def _ensure_child_has_no_other_parent(self, child: Semantic) -> None: + def _ensure_child_has_no_other_parent(self, child: Lexical) -> None: if child.parent is not None and child.parent is not self: raise ValueError( f"The child ({child.label}) already belongs to the parent " @@ -324,7 +324,7 @@ def _ensure_child_has_no_other_parent(self, child: Semantic) -> None: f"add it to this parent ({self.label})." ) - def _this_child_is_already_at_this_label(self, child: Semantic, label: str) -> bool: + def _this_child_is_already_at_this_label(self, child: Lexical, label: str) -> bool: return ( label == child.label and label in self.child_labels @@ -387,7 +387,7 @@ def __getstate__(self): # Remove the children from the state and store each element right in the state # -- the labels are guaranteed to not be attributes already so this is safe, - # and it makes sure that the state path matches the semantic path + # and it makes sure that the state path matches the lexical path del state["_children"] state["child_labels"] = self.child_labels for child in self: @@ -399,7 +399,7 @@ def __setstate__(self, state): # Reconstruct children from state # Remove them from the state as you go, so they don't hang around in the # __dict__ after we set state -- they were only there to start with to guarantee - # that the state path and the semantic path matched (i.e. without ".children." + # that the state path and the lexical path matched (i.e. without ".children." # in between) state["_children"] = bidict( {label: state.pop(label) for label in state.pop("child_labels")} @@ -411,19 +411,19 @@ def __setstate__(self, state): # Children purge their parent information in their __getstate__. This avoids # recursion, so we don't need to ship an entire graph off to a second process, - # but rather can send just the requested object and its scope (semantic + # but rather can send just the requested object and its scope (lexical # children). So, now return their parent to them: for child in self: child.parent = self -def _ensure_path_is_not_cyclic(parent, child: Semantic) -> None: - if isinstance(parent, Semantic) and parent.semantic_path.startswith( - child.semantic_path + child.semantic_delimiter +def _ensure_path_is_not_cyclic(parent, child: Lexical) -> None: + if isinstance(parent, Lexical) and parent.lexical_path.startswith( + child.lexical_path + child.lexical_delimiter ): raise CyclicPathError( f"{parent.label} cannot be the parent of {child.label}, because its " - f"semantic path is already in {child.label}'s path and cyclic paths " - f"are not allowed. (i.e. {child.semantic_path} is in " - f"{parent.semantic_path})" + f"lexical path is already in {child.label}'s path and cyclic paths " + f"are not allowed. (i.e. 
{child.lexical_path} is in " + f"{parent.lexical_path})" ) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 91541694f..bed659ba0 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -19,8 +19,8 @@ from pyiron_workflow.draw import Node as GraphvizNode from pyiron_workflow.logging import logger +from pyiron_workflow.mixin.lexical import Lexical from pyiron_workflow.mixin.run import ReadinessError, Runnable -from pyiron_workflow.mixin.semantics import Semantic from pyiron_workflow.mixin.single_output import ExploitsSingleOutput from pyiron_workflow.storage import StorageInterface, available_backends from pyiron_workflow.topology import ( @@ -39,7 +39,7 @@ class Node( - Semantic["Composite"], + Lexical["Composite"], Runnable, ExploitsSingleOutput, ABC, @@ -108,7 +108,7 @@ class Node( - Nodes can suppress raising errors they encounter by setting a runtime keyword argument. - Nodes have a label by which they are identified within their scope, and a full - label which is unique among the entire semantic graph they exist within + label which is unique among the entire lexical graph they exist within - Nodes can run their computation using remote resources by setting an executor - Any executor must have a :meth:`submit` method with the same interface as :class:`concurrent.futures.Executor`, must return a @@ -363,15 +363,15 @@ def _after_node_setup( @property def graph_path(self) -> str: """ - The path of node labels from the graph root (parent-most node in this semantic + The path of node labels from the graph root (parent-most node in this lexical path) down to this node. """ - prefix = self.parent.semantic_path if isinstance(self.parent, Node) else "" - return prefix + self.semantic_delimiter + self.label + prefix = self.parent.lexical_path if isinstance(self.parent, Node) else "" + return prefix + self.lexical_delimiter + self.label @property def graph_root(self) -> Node: - """The parent-most node in this semantic path.""" + """The parent-most node in this lexical path.""" return self.parent.graph_root if isinstance(self.parent, Node) else self def data_input_locked(self): @@ -795,7 +795,7 @@ def draw( :param:`view` or :param:`filename` is provided, this will be called before returning the graph. The graph file and rendered image will be stored in a directory based of the - node's semantic path, unless a :param:`directory` is explicitly set. + node's lexical path, unless a :param:`directory` is explicitly set. This is purely for convenience -- since we directly return a graphviz object you can instead use this to leverage the full power of graphviz. @@ -906,7 +906,7 @@ def save( node. (Default is "pickle", which loads the standard pickling back end.) filename (str | Path | None): The name of the file (without extensions) at which to save the node. (Default is None, which uses the node's - semantic path.) + lexical path.) **kwargs: Back end-specific keyword arguments. """ for selected_backend in available_backends( @@ -945,7 +945,7 @@ def load( try to load whatever you can find.) filename (str | Path | None): The name of the file (without extensions) at which to save the node. (Default is None, which uses the node's - semantic path.) + lexical path.) **kwargs: back end-specific arguments (only likely to work in combination with :param:`only_requested`, otherwise there's nothing to be specific _to_.) @@ -993,7 +993,7 @@ def delete_storage( try to load whatever you can find.) 
filename (str | Path | None): The name of the file (without extensions) at which to save the node. (Default is None, which uses the node's - semantic path.) + lexical path.) **kwargs: back end-specific arguments (only likely to work in combination with :param:`only_requested`, otherwise there's nothing to be specific _to_.) @@ -1023,7 +1023,7 @@ def has_saved_content( try to load whatever you can find.) filename (str | Path | None): The name of the file (without extensions) at which to save the node. (Default is None, which uses the node's - semantic path.) + lexical path.) **kwargs: back end-specific arguments (only likely to work in combination with :param:`only_requested`, otherwise there's nothing to be specific _to_.) @@ -1072,14 +1072,14 @@ def report_import_readiness(self, tabs=0, report_so_far=""): def _clean_graph_directory(self): """ Delete the temporary results file (if any), and then go from this node's - semantic directory up to its semantic root's directory removing any empty + lexical directory up to its lexical root's directory removing any empty directories. Note: doesn't do a sophisticated walk, so sibling empty directories will cause a parent to identify as non-empty. """ self._temporary_result_file.unlink(missing_ok=True) # Recursively remove empty directories - root_directory = self.semantic_root.as_path().parent + root_directory = self.lexical_root.as_path().parent for parent in self._temporary_result_file.parents: if parent == root_directory or not parent.exists() or any(parent.iterdir()): break diff --git a/pyiron_workflow/nodes/composite.py b/pyiron_workflow/nodes/composite.py index cbe8ebe81..daf5cbd2d 100644 --- a/pyiron_workflow/nodes/composite.py +++ b/pyiron_workflow/nodes/composite.py @@ -14,7 +14,7 @@ from pyiron_snippets.dotdict import DotDict from pyiron_workflow.create import HasCreator -from pyiron_workflow.mixin.semantics import SemanticParent +from pyiron_workflow.mixin.lexical import LexicalParent from pyiron_workflow.node import Node from pyiron_workflow.topology import set_run_connections_according_to_dag @@ -53,7 +53,7 @@ class FailedChildError(RuntimeError): """Raise when one or more child nodes raise exceptions.""" -class Composite(SemanticParent[Node], HasCreator, Node, ABC): +class Composite(LexicalParent[Node], HasCreator, Node, ABC): """ A base class for nodes that have internal graph structure -- i.e. they hold a collection of child nodes and their computation is to execute that graph. diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 7f1b7b6f1..40cee293c 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -118,7 +118,7 @@ def load( Args: node (Node | None): The node to load. Optional if filename is provided. filename (str | Path | None): The path to the file to load (without file - extension). Uses the canonical filename based on the node's semantic + extension). Uses the canonical filename based on the node's lexical path instead if this is None. **kwargs: Additional keyword arguments. @@ -173,7 +173,7 @@ def _parse_filename( ) -> Path: """ Make sure the node xor filename was provided, and if it's the node, convert it - into a canonical filename by exploiting the node's semantic path. + into a canonical filename by exploiting the node's lexical path. 
""" if node is None and filename is None: raise ValueError( diff --git a/tests/unit/mixin/test_semantics.py b/tests/unit/mixin/test_lexical.py similarity index 56% rename from tests/unit/mixin/test_semantics.py rename to tests/unit/mixin/test_lexical.py index fbb9bac47..9ad9ff07e 100644 --- a/tests/unit/mixin/test_semantics.py +++ b/tests/unit/mixin/test_lexical.py @@ -3,38 +3,38 @@ import unittest from pathlib import Path -from pyiron_workflow.mixin.semantics import ( +from pyiron_workflow.mixin.lexical import ( CyclicPathError, - Semantic, - SemanticParent, + Lexical, + LexicalParent, ) -class ConcreteSemantic(Semantic["ConcreteParent"]): +class ConcreteLexical(Lexical["ConcreteParent"]): @classmethod - def parent_type(cls) -> type[ConcreteSemanticParent]: - return ConcreteSemanticParent + def parent_type(cls) -> type[ConcreteLexicalParent]: + return ConcreteLexicalParent -class ConcreteParent(SemanticParent[ConcreteSemantic]): +class ConcreteParent(LexicalParent[ConcreteLexical]): _label = "concrete_parent_default_label" @classmethod - def child_type(cls) -> type[ConcreteSemantic]: - return ConcreteSemantic + def child_type(cls) -> type[ConcreteLexical]: + return ConcreteLexical -class ConcreteSemanticParent(ConcreteParent, ConcreteSemantic): +class ConcreteLexicalParent(ConcreteParent, ConcreteLexical): pass -class TestSemantics(unittest.TestCase): +class TestLexical(unittest.TestCase): def setUp(self): - self.root = ConcreteSemanticParent(label="root") - self.child1 = ConcreteSemantic(label="child1", parent=self.root) - self.middle1 = ConcreteSemanticParent(label="middle", parent=self.root) - self.middle2 = ConcreteSemanticParent(label="middle_sub", parent=self.middle1) - self.child2 = ConcreteSemantic(label="child2", parent=self.middle2) + self.root = ConcreteLexicalParent(label="root") + self.child1 = ConcreteLexical(label="child1", parent=self.root) + self.middle1 = ConcreteLexicalParent(label="middle", parent=self.root) + self.middle2 = ConcreteLexicalParent(label="middle_sub", parent=self.middle1) + self.child2 = ConcreteLexical(label="child2", parent=self.middle2) def test_getattr(self): with self.assertRaises(AttributeError) as context: @@ -54,26 +54,26 @@ def test_getattr(self): def test_label_validity(self): with self.assertRaises(TypeError, msg="Label must be a string"): - ConcreteSemantic(label=123) + ConcreteLexical(label=123) def test_label_delimiter(self): with self.assertRaises( ValueError, - msg=f"Delimiter '{ConcreteSemantic.semantic_delimiter}' not allowed", + msg=f"Delimiter '{ConcreteLexical.lexical_delimiter}' not allowed", ): - ConcreteSemantic(label=f"invalid{ConcreteSemantic.semantic_delimiter}label") + ConcreteLexical(label=f"invalid{ConcreteLexical.lexical_delimiter}label") - non_semantic_parent = ConcreteParent() + non_lexical_parent = ConcreteParent() with self.assertRaises( ValueError, - msg=f"Delimiter '{ConcreteSemantic.semantic_delimiter}' not allowed", + msg=f"Delimiter '{ConcreteLexical.lexical_delimiter}' not allowed", ): - non_semantic_parent.label = f"contains_{non_semantic_parent.child_type().semantic_delimiter}_delimiter" + non_lexical_parent.label = f"contains_{non_lexical_parent.child_type().lexical_delimiter}_delimiter" - def test_semantic_delimiter(self): + def test_lexical_delimiter(self): self.assertEqual( "/", - ConcreteSemantic.semantic_delimiter, + ConcreteLexical.lexical_delimiter, msg="This is just a hard-code to the current value, update it freely so " "the test passes; if it fails it's just a reminder that your change is " "not 
backwards compatible, and the next release number should reflect " @@ -93,18 +93,18 @@ def test_parent(self): self.middle2.add_child(self.middle1) def test_path(self): - self.assertEqual(self.root.semantic_path, "/root") - self.assertEqual(self.child1.semantic_path, "/root/child1") - self.assertEqual(self.middle1.semantic_path, "/root/middle") - self.assertEqual(self.middle2.semantic_path, "/root/middle/middle_sub") - self.assertEqual(self.child2.semantic_path, "/root/middle/middle_sub/child2") + self.assertEqual(self.root.lexical_path, "/root") + self.assertEqual(self.child1.lexical_path, "/root/child1") + self.assertEqual(self.middle1.lexical_path, "/root/middle") + self.assertEqual(self.middle2.lexical_path, "/root/middle/middle_sub") + self.assertEqual(self.child2.lexical_path, "/root/middle/middle_sub/child2") def test_root(self): - self.assertEqual(self.root.semantic_root, self.root) - self.assertEqual(self.child1.semantic_root, self.root) - self.assertEqual(self.middle1.semantic_root, self.root) - self.assertEqual(self.middle2.semantic_root, self.root) - self.assertEqual(self.child2.semantic_root, self.root) + self.assertEqual(self.root.lexical_root, self.root) + self.assertEqual(self.child1.lexical_root, self.root) + self.assertEqual(self.middle1.lexical_root, self.root) + self.assertEqual(self.middle2.lexical_root, self.root) + self.assertEqual(self.child2.lexical_root, self.root) def test_as_path(self): self.assertEqual( @@ -127,21 +127,21 @@ def test_as_path(self): ) def test_detached_parent_path(self): - orphan = ConcreteSemantic(label="orphan") + orphan = ConcreteLexical(label="orphan") orphan.__setstate__(self.child2.__getstate__()) self.assertIsNone( orphan.parent, msg="We still should not explicitly have a parent" ) self.assertListEqual( - orphan.detached_parent_path.split(orphan.semantic_delimiter), - self.child2.semantic_path.split(orphan.semantic_delimiter)[:-1], - msg="Despite not having a parent, the detached path should store semantic " + orphan.detached_parent_path.split(orphan.lexical_delimiter), + self.child2.lexical_path.split(orphan.lexical_delimiter)[:-1], + msg="Despite not having a parent, the detached path should store lexical " "path info through the get/set state routine", ) self.assertEqual( - orphan.semantic_path, - self.child2.semantic_path, - msg="The detached path should carry through to semantic path in the " + orphan.lexical_path, + self.child2.lexical_path, + msg="The detached path should carry through to lexical path in the " "absence of a parent", ) orphan.label = "orphan" # Re-set label after getting state @@ -152,10 +152,10 @@ def test_detached_parent_path(self): "presence of a parent", ) self.assertListEqual( - orphan.semantic_path.split(orphan.semantic_delimiter)[:-1], - self.child2.semantic_path.split(self.child2.semantic_delimiter)[:-1], + orphan.lexical_path.split(orphan.lexical_delimiter)[:-1], + self.child2.lexical_path.split(self.child2.lexical_delimiter)[:-1], msg="Sanity check -- except for the now-different labels, we should be " - "recovering the usual semantic path on setting a parent.", + "recovering the usual lexical path on setting a parent.", ) diff --git a/tests/unit/nodes/test_composite.py b/tests/unit/nodes/test_composite.py index 614bc7ba0..8411abe59 100644 --- a/tests/unit/nodes/test_composite.py +++ b/tests/unit/nodes/test_composite.py @@ -469,7 +469,7 @@ def test_graph_info(self): with self.subTest("test_graph_path"): self.assertEqual( - top.semantic_delimiter + top.label, + top.lexical_delimiter + top.label, 
top.graph_path, msg="The parent-most node should be its own path.", ) @@ -598,7 +598,7 @@ def test_result_serialization(self): self.assertFalse( self.comp.as_path().is_dir(), msg="Actually, we expect cleanup to have removed empty directories up to " - "and including the semantic root's own directory", + "and including the lexical root's own directory", ) def test_empty(self): diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index cefb5f23d..a971b70fe 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -363,7 +363,7 @@ def test_graph_info(self): n = ANode() self.assertEqual( - n.semantic_delimiter + n.label, + n.lexical_delimiter + n.label, n.graph_path, msg="Lone nodes should just have their label as the path, as there is no " "parent above.",