diff --git a/notebooks/atomistics_nodes.ipynb b/notebooks/atomistics_nodes.ipynb
index cfd988d1d..915ac7954 100644
--- a/notebooks/atomistics_nodes.ipynb
+++ b/notebooks/atomistics_nodes.ipynb
@@ -27,9 +27,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "Workflow.register(\"calculator\", \"pyiron_workflow.node_library.atomistics.calculator\")\n",
-    "Workflow.register(\"macro\", \"pyiron_workflow.node_library.atomistics.macro\")\n",
-    "Workflow.register(\"task\", \"pyiron_workflow.node_library.atomistics.task\")\n",
+    "Workflow.register(\"atomistics\", \"pyiron_workflow.node_library.atomistics\")\n",
     "Workflow.register(\"plotting\", \"pyiron_workflow.node_library.plotting\")"
    ]
   },
@@ -53,10 +51,10 @@
    "source": [
     "wf = Workflow(\"ev_curve\")\n",
     "\n",
-    "wf.structure = wf.create.task.Bulk(\"Al\")\n",
-    "wf.calculator = wf.create.calculator.Emt()\n",
+    "wf.structure = wf.create.atomistics.task.Bulk(\"Al\")\n",
+    "wf.calculator = wf.create.atomistics.calculator.Emt()\n",
     "\n",
-    "wf.ev = wf.create.macro.EnergyVolumeCurve(\n",
+    "wf.ev = wf.create.atomistics.macro.EnergyVolumeCurve(\n",
     "    structure=wf.structure, \n",
     "    calculator=wf.calculator,\n",
     ")\n",
@@ -65,13 +63,13 @@
     "    wf.ev.outputs.result_dict['energy']\n",
     ")\n",
     "\n",
-    "wf.elastic = wf.create.macro.ElasticMatrix(\n",
+    "wf.elastic = wf.create.atomistics.macro.ElasticMatrix(\n",
     "    structure=wf.structure, \n",
     "    calculator=wf.calculator,\n",
     ")\n",
     "wf.C = wf.elastic.outputs.result_dict[\"C\"]\n",
     "\n",
-    "wf.phonons = wf.create.macro.Phonons(\n",
+    "wf.phonons = wf.create.atomistics.macro.Phonons(\n",
     "    structure=wf.structure, \n",
     "    calculator=wf.calculator,\n",
     ")\n",
@@ -104,7 +102,7 @@
     {
      "data": {
       "text/plain": [
-       ""
+       ""
       ]
      },
      "execution_count": 4,
@@ -170,7 +168,7 @@
     {
      "data": {
       "text/plain": [
-       ""
+       ""
       ]
      },
      "execution_count": 6,
diff --git a/notebooks/deepdive.ipynb b/notebooks/deepdive.ipynb
index 781dd5369..5e2f60432 100644
--- a/notebooks/deepdive.ipynb
+++ b/notebooks/deepdive.ipynb
@@ -1604,7 +1604,7 @@
     "\n",
     "To access prebuilt nodes we can `.create` them. This works both from the workflow class _and_ from a workflow instance.\n",
     "\n",
-    "There are a few of nodes that are always available under the `Workflow.create.standard` namespace, otherwise we need to register new node packages. This is done with the `register` method, which takes the domain (namespace/key/attribute/whatever you want to call it) under which you want to register the new nodes, and a string import path to a module that has a list of nodes under the name `nodes`, i.e. the module has the property `nodes: list[pyiron_workflow.nodes.Node]`. (This API is subject to change, as we work to improve usability and bring node packages more and more in line with \"FAIR\" principles.)\n",
+    "There are a few nodes that are always available under the `Workflow.create.standard` namespace; otherwise we need to register new node packages. This is done with the `register` method, which takes the domain (namespace/key/attribute/whatever you want to call it) under which you want to register the new nodes, and a string import path to a module that has a list of nodes under the name `nodes`, i.e. the module has the property `nodes: list[pyiron_workflow.nodes.Node]`. (You can also register a package with arbitrary nesting, as long as each non-package sub-module has such a `nodes` attribute; cf. the atomistics demo notebook for an example of this.) (This API is subject to change, as we work to improve usability and bring node packages more and more in line with \"FAIR\" principles.)\n",
     "\n",
     "You can make your own `.py` files with nodes for reuse this way, but `pyiron_workflow` also comes with a couple of packages. In this example we'll use atomistics and plotting:"
    ]
   },
diff --git a/pyiron_workflow/interfaces.py b/pyiron_workflow/interfaces.py
index 7288f490c..c045f2b37 100644
--- a/pyiron_workflow/interfaces.py
+++ b/pyiron_workflow/interfaces.py
@@ -5,6 +5,7 @@
 from __future__ import annotations

 from importlib import import_module
+import pkgutil
 from sys import version_info

 from pyiron_workflow.snippets.singleton import Singleton
@@ -32,6 +33,7 @@
     function_node,
     single_value_node,
 )
+from pyiron_workflow.snippets.dotdict import DotDict


 class Creator(metaclass=Singleton):
@@ -122,10 +124,7 @@ def meta(self):

     def __getattr__(self, item):
         try:
-            module = import_module(self._node_packages[item])
-            from pyiron_workflow.node_package import NodePackage
-
-            return NodePackage(*module.nodes)
+            return self._node_packages[item][1]
         except KeyError as e:
             raise AttributeError(
                 f"{self.__class__.__name__} could not find attribute {item} -- did you "
@@ -173,9 +172,10 @@ def register(self, domain: str, package_identifier: str) -> None:
         elif domain in self.__dir__():
             raise AttributeError(f"{domain} is already an attribute of {self}")

-        self._verify_identifier(package_identifier)
-
-        self._node_packages[domain] = package_identifier
+        self._node_packages[domain] = (
+            package_identifier,
+            self._import_nodes(package_identifier),
+        )

     def _package_conflicts_with_existing(
         self, domain: str, package_identifier: str
@@ -195,38 +195,48 @@ def _package_conflicts_with_existing(
         """
         if domain in self._node_packages.keys():
             # If it's already here, it had better be the same package
-            return package_identifier != self._node_packages[domain]
+            return package_identifier != self._node_packages[domain][0]
             # We can make "sameness" logic more complex as we allow more sophisticated
             # identifiers
         else:
             # If it's not here already, it can't conflict!
             return False

-    @staticmethod
-    def _verify_identifier(package_identifier: str):
+    def _import_nodes(self, package_identifier: str):
         """
-        Logic for verifying whether new package identifiers will actually be usable for
-        creating node packages when their domain is called. Lets us fail early in
-        registration.
-
-        Right now, we just make sure it's a string from which we can import a list of
-        nodes.
+        Recursively walk through all submodules of the provided package identifier,
+        and collect an instance of `nodes: list[Node]` from each non-package module.
         """
+
+        module = import_module(package_identifier)
+        if hasattr(module, "__path__"):
+            package = DotDict()
+            for _, submodule_name, _ in pkgutil.walk_packages(
+                module.__path__, module.__name__ + "."
+            ):
+                package[submodule_name.split(".")[-1]] = self._import_nodes(
+                    submodule_name
+                )
+        else:
+            package = self._get_nodes_from_module(module)
+        return package
+
+    @staticmethod
+    def _get_nodes_from_module(module):
         from pyiron_workflow.node import Node
+        from pyiron_workflow.node_package import NodePackage

         try:
-            module = import_module(package_identifier)
             nodes = module.nodes
-            if not all(issubclass(node, Node) for node in nodes):
-                raise TypeError(
-                    f"At least one node in {nodes} was not of the type {Node.__name__}"
-                )
-        except Exception as e:
+        except AttributeError:
             raise ValueError(
-                f"The package identifier is {package_identifier} is not valid. Please "
-                f"ensure it is an importable module with a list of {Node.__name__} "
-                f"objects stored in the variable `nodes`."
-            ) from e
+                f"Could not find `nodes: list[Node]` in {module.__name__}"
+            )
+        if not all(issubclass(node, Node) for node in nodes):
+            raise TypeError(
+                f"At least one node in {nodes} was not of the type {Node.__name__}"
+            )
+        return NodePackage(*module.nodes)


 class Wrappers(metaclass=Singleton):
diff --git a/tests/static/nodes_subpackage/__init__.py b/tests/static/nodes_subpackage/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/static/nodes_subpackage/demo_nodes.py b/tests/static/nodes_subpackage/demo_nodes.py
new file mode 100644
index 000000000..533e10055
--- /dev/null
+++ b/tests/static/nodes_subpackage/demo_nodes.py
@@ -0,0 +1,16 @@
+"""
+A demo node package for the purpose of testing node package registration.
+"""
+
+from typing import Optional
+
+from pyiron_workflow import Workflow
+
+
+@Workflow.wrap_as.single_value_node("sum")
+def OptionallyAdd(x: int, y: Optional[int] = None) -> int:
+    y = 0 if y is None else y
+    return x + y
+
+
+nodes = [OptionallyAdd]
diff --git a/tests/static/nodes_subpackage/subsub_package/__init__.py b/tests/static/nodes_subpackage/subsub_package/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/static/nodes_subpackage/subsub_package/demo_nodes.py b/tests/static/nodes_subpackage/subsub_package/demo_nodes.py
new file mode 100644
index 000000000..533e10055
--- /dev/null
+++ b/tests/static/nodes_subpackage/subsub_package/demo_nodes.py
@@ -0,0 +1,16 @@
+"""
+A demo node package for the purpose of testing node package registration.
+"""
+
+from typing import Optional
+
+from pyiron_workflow import Workflow
+
+
+@Workflow.wrap_as.single_value_node("sum")
+def OptionallyAdd(x: int, y: Optional[int] = None) -> int:
+    y = 0 if y is None else y
+    return x + y
+
+
+nodes = [OptionallyAdd]
diff --git a/tests/unit/test_interfaces.py b/tests/unit/test_interfaces.py
index a25b49de5..32c763554 100644
--- a/tests/unit/test_interfaces.py
+++ b/tests/unit/test_interfaces.py
@@ -2,6 +2,7 @@

 from pyiron_workflow._tests import ensure_tests_in_python_path
 from pyiron_workflow.interfaces import Creator
+from pyiron_workflow.node_package import NodePackage


 class TestCreator(unittest.TestCase):
@@ -29,6 +30,10 @@ def test_registration(self):
             msg="Node should get instantiated from creator and be operable"
         )

+        self.creator.register("sub", "static.nodes_subpackage")
+        self.assertIsInstance(self.creator.sub.demo_nodes, NodePackage)
+        self.assertIsInstance(self.creator.sub.subsub_package.demo_nodes, NodePackage)
+
         with self.subTest("Test re-registration"):
             self.creator.register("demo", "static.demo_nodes")
             # Same thing to the same location should be fine
@@ -65,8 +70,8 @@
             self.creator.register("forgetful", "static.forgetful_node_package")

         with self.assertRaises(
-            ValueError,
-            msg="Must have only nodes in the iterable `nodes` property"
+            TypeError,
+            msg="Must have only node classes in the iterable `nodes` property"
         ):
            self.creator.register("faulty", "static.faulty_node_package")
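
For reference, a minimal usage sketch of the nested registration exercised by the new test in tests/unit/test_interfaces.py. This is illustrative only; it assumes the `static` test directory has been made importable, e.g. via the test suite's `ensure_tests_in_python_path()` helper, and that `Workflow.register`/`Workflow.create` are used as in the notebooks above:

    from pyiron_workflow import Workflow
    from pyiron_workflow._tests import ensure_tests_in_python_path

    # Assumed helper from the test suite: makes tests/static importable so that
    # "static.*" package identifiers resolve (mirrors its use in the unit tests).
    ensure_tests_in_python_path()

    # Registering a package (rather than a single module) now walks its sub-modules
    # recursively and exposes one NodePackage per non-package module:
    Workflow.register("sub", "static.nodes_subpackage")

    print(Workflow.create.sub.demo_nodes)                 # NodePackage from the top-level module
    print(Workflow.create.sub.subsub_package.demo_nodes)  # NodePackage from the nested sub-package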