From b2c7a11ba8e994b5d8af914066291def39104a7e Mon Sep 17 00:00:00 2001 From: Frithjof Date: Tue, 20 Jun 2023 23:22:37 -0500 Subject: [PATCH] Allow tracking of component-specific execution data (#460) --- docs/examples/slurm-and-mpi-execution/mpi.py | 2 +- .../examples/slurm-and-mpi-execution/slurm.py | 20 +- .../slurm-and-mpi-execution/test_mpi.py | 2 +- .../slurm-and-mpi-execution/test_slurm.py | 2 +- docs/guide/component.md | 2 +- docs/guide/introduction.md | 2 +- poetry.lock | 253 +++++++------- pyproject.toml | 4 +- src/machinable/__init__.py | 2 +- src/machinable/collection.py | 15 +- src/machinable/component.py | 248 +++++--------- src/machinable/element.py | 33 +- src/machinable/execution.py | 312 ++++++++++++++++-- src/machinable/index.py | 211 ++++++------ src/machinable/interface.py | 96 +++--- src/machinable/project.py | 3 - src/machinable/schema.py | 1 + src/machinable/settings.py | 1 - src/machinable/utils.py | 13 +- tests/samples/project/count.py | 10 + tests/samples/project/fail.py | 7 + .../samples/project/interface/events_check.py | 6 +- tests/test_collection.py | 26 +- tests/test_component.py | 40 +-- tests/test_element.py | 11 + tests/test_execution.py | 200 +++++++---- tests/test_index.py | 20 +- tests/test_interface.py | 3 +- tests/test_project.py | 1 - tests/test_schedule.py | 4 +- tests/test_utils.py | 9 + 31 files changed, 946 insertions(+), 613 deletions(-) create mode 100644 tests/samples/project/count.py create mode 100644 tests/samples/project/fail.py diff --git a/docs/examples/slurm-and-mpi-execution/mpi.py b/docs/examples/slurm-and-mpi-execution/mpi.py index 72a50cc4..3076d495 100644 --- a/docs/examples/slurm-and-mpi-execution/mpi.py +++ b/docs/examples/slurm-and-mpi-execution/mpi.py @@ -13,7 +13,7 @@ class Config: def __call__(self): for executable in self.pending_executables: script_file = self.save_file( - f"mpi-{executable.id}.sh", + [executable, "mpi.sh"], executable.dispatch_code(), ) st = os.stat(script_file) diff --git a/docs/examples/slurm-and-mpi-execution/slurm.py b/docs/examples/slurm-and-mpi-execution/slurm.py index 82007fa2..9f9a3023 100644 --- a/docs/examples/slurm-and-mpi-execution/slurm.py +++ b/docs/examples/slurm-and-mpi-execution/slurm.py @@ -7,24 +7,28 @@ class Slurm(Execution): def __call__(self): script = "#!/usr/bin/env bash\n" - for component in self.pending_executables: - resources = component.resources() + for executable in self.pending_executables: + resources = executable.resources() if "--job-name" not in resources: - resources["--job-name"] = f"{component.id}" + resources["--job-name"] = f"{executable.id}" if "--output" not in resources: - resources["--output"] = component.local_directory("output.log") + resources["--output"] = self.local_directory( + executable.id, "output.log" + ) if "--open-mode" not in resources: resources["--open-mode"] = "append" sbatch_arguments = [] for k, v in resources.items(): + if not k.startswith("--"): + continue line = "#SBATCH " + k if v not in [None, True]: line += f"={v}" sbatch_arguments.append(line) script += "\n".join(sbatch_arguments) + "\n" - script += component.dispatch_code() + script += executable.dispatch_code() # submit to slurm process = subprocess.Popen( @@ -50,13 +54,13 @@ def __call__(self): except ValueError: job_id = False print( - f"{output} for component {component.id} ({component.local_directory()})" + f"{output} for component {executable.id} ({executable.local_directory()})" ) # save job information self.save_file( - filepath="slurm.json", - data={ + [executable, 
"slurm.json"], + { "job_id": job_id, "cmd": sbatch_arguments, "script": script, diff --git a/docs/examples/slurm-and-mpi-execution/test_mpi.py b/docs/examples/slurm-and-mpi-execution/test_mpi.py index fb2649ec..eb4f8ef0 100644 --- a/docs/examples/slurm-and-mpi-execution/test_mpi.py +++ b/docs/examples/slurm-and-mpi-execution/test_mpi.py @@ -27,5 +27,5 @@ def test_mpi_execution(tmp_path): component = MpiExample() with Execution.get("mpi"): component.launch() - assert component.is_finished() + assert component.execution.is_finished() assert component.load_file("test.txt") == "hello" diff --git a/docs/examples/slurm-and-mpi-execution/test_slurm.py b/docs/examples/slurm-and-mpi-execution/test_slurm.py index 691c5572..f56eec82 100644 --- a/docs/examples/slurm-and-mpi-execution/test_slurm.py +++ b/docs/examples/slurm-and-mpi-execution/test_slurm.py @@ -37,7 +37,7 @@ def test_slurm_execution(tmp_path): component.launch() for _ in range(60): - if component.is_finished(): + if component.execution.is_finished(): assert "Hello world from Slurm" in component.output() assert ( component.load_file("test_run.json")["success"] is True diff --git a/docs/guide/component.md b/docs/guide/component.md index 35596df9..bae2ff21 100644 --- a/docs/guide/component.md +++ b/docs/guide/component.md @@ -37,7 +37,7 @@ Downloading 'mnist' ... If the execution is successful, the component is marked as finished. ```python ->>> mnist.is_finished() +>>> mnist.execution.is_finished() True ``` diff --git a/docs/guide/introduction.md b/docs/guide/introduction.md index b6fee170..1c945819 100644 --- a/docs/guide/introduction.md +++ b/docs/guide/introduction.md @@ -35,7 +35,7 @@ class EstimatePi(Component): ) def summary(self): - if self.is_finished(): + if self.execution.is_finished(): print( f"After {self.config.samples} samples, " f"PI is approximately {self.load_file('result.json')['pi']}." 
diff --git a/poetry.lock b/poetry.lock index c16031c1..35f6703d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -142,63 +142,72 @@ files = [ [[package]] name = "coverage" -version = "7.2.6" +version = "7.2.7" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:496b86f1fc9c81a1cd53d8842ef712e950a4611bba0c42d33366a7b91ba969ec"}, - {file = "coverage-7.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbe6e8c0a9a7193ba10ee52977d4d5e7652957c1f56ccefed0701db8801a2a3b"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d06b721c2550c01a60e5d3093f417168658fb454e5dfd9a23570e9bffe39a1"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77a04b84d01f0e12c66f16e69e92616442dc675bbe51b90bfb074b1e5d1c7fbd"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35db06450272473eab4449e9c2ad9bc6a0a68dab8e81a0eae6b50d9c2838767e"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6727a0d929ff0028b1ed8b3e7f8701670b1d7032f219110b55476bb60c390bfb"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aac1d5fdc5378f6bac2c0c7ebe7635a6809f5b4376f6cf5d43243c1917a67087"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9e4a5eb1bbc3675ee57bc31f8eea4cd7fb0cbcbe4912cf1cb2bf3b754f4a80"}, - {file = "coverage-7.2.6-cp310-cp310-win32.whl", hash = "sha256:71f739f97f5f80627f1fee2331e63261355fd1e9a9cce0016394b6707ac3f4ec"}, - {file = "coverage-7.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:fde5c7a9d9864d3e07992f66767a9817f24324f354caa3d8129735a3dc74f126"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc7b667f8654376e9353dd93e55e12ce2a59fb6d8e29fce40de682273425e044"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:697f4742aa3f26c107ddcb2b1784a74fe40180014edbd9adaa574eac0529914c"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541280dde49ce74a4262c5e395b48ea1207e78454788887118c421cb4ffbfcac"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7f1a8328eeec34c54f1d5968a708b50fc38d31e62ca8b0560e84a968fbf9a9"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd58eb5a2371bf160590f4262109f66b6043b0b991930693134cb617bc0169"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae82c5f168d2a39a5d69a12a69d4dc23837a43cf2ca99be60dfe59996ea6b113"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f5440cdaf3099e7ab17a5a7065aed59aff8c8b079597b61c1f8be6f32fe60636"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6f03f87fea579d55e0b690d28f5042ec1368650466520fbc400e7aeaf09e995"}, - {file = "coverage-7.2.6-cp311-cp311-win32.whl", hash = "sha256:dc4d5187ef4d53e0d4c8eaf530233685667844c5fb0b855fea71ae659017854b"}, - {file = "coverage-7.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:c93d52c3dc7b9c65e39473704988602300e3cc1bad08b5ab5b03ca98bbbc68c1"}, - {file = 
"coverage-7.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42c692b55a647a832025a4c048007034fe77b162b566ad537ce65ad824b12a84"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7786b2fa7809bf835f830779ad285215a04da76293164bb6745796873f0942d"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25bad4196104761bc26b1dae9b57383826542ec689ff0042f7f4f4dd7a815cba"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2692306d3d4cb32d2cceed1e47cebd6b1d2565c993d6d2eda8e6e6adf53301e6"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:392154d09bd4473b9d11351ab5d63391f3d5d24d752f27b3be7498b0ee2b5226"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa079995432037b5e2ef5ddbb270bcd2ded9f52b8e191a5de11fe59a00ea30d8"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d712cefff15c712329113b01088ba71bbcef0f7ea58478ca0bbec63a824844cb"}, - {file = "coverage-7.2.6-cp37-cp37m-win32.whl", hash = "sha256:004948e296149644d208964300cb3d98affc5211e9e490e9979af4030b0d6473"}, - {file = "coverage-7.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:c1d7a31603c3483ac49c1726723b0934f88f2c011c660e6471e7bd735c2fa110"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3436927d1794fa6763b89b60c896f9e3bd53212001026ebc9080d23f0c2733c1"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44c9b9f1a245f3d0d202b1a8fa666a80b5ecbe4ad5d0859c0fb16a52d9763224"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3783a286d5a93a2921396d50ce45a909aa8f13eee964465012f110f0cbb611"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cff6980fe7100242170092bb40d2b1cdad79502cd532fd26b12a2b8a5f9aee0"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c534431153caffc7c495c3eddf7e6a6033e7f81d78385b4e41611b51e8870446"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3062fd5c62df988cea9f2972c593f77fed1182bfddc5a3b12b1e606cb7aba99e"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6284a2005e4f8061c58c814b1600ad0074ccb0289fe61ea709655c5969877b70"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:97729e6828643f168a2a3f07848e1b1b94a366b13a9f5aba5484c2215724edc8"}, - {file = "coverage-7.2.6-cp38-cp38-win32.whl", hash = "sha256:dc11b42fa61ff1e788dd095726a0aed6aad9c03d5c5984b54cb9e1e67b276aa5"}, - {file = "coverage-7.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:cbcc874f454ee51f158afd604a315f30c0e31dff1d5d5bf499fc529229d964dd"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3cacc6a665221108ecdf90517a8028d07a2783df3417d12dcfef1c517e67478"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:272ab31228a9df857ab5df5d67936d8861464dc89c5d3fab35132626e9369379"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a8723ccec4e564d4b9a79923246f7b9a8de4ec55fa03ec4ec804459dade3c4f"}, - {file = 
"coverage-7.2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5906f6a84b47f995cd1bf0aca1c72d591c55ee955f98074e93660d64dfc66eb9"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c139b7ab3f0b15f9aad0a3fedef5a1f8c0b2bdc291d88639ca2c97d3682416"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a5ffd45c6b93c23a8507e2f436983015c6457aa832496b6a095505ca2f63e8f1"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4f3c7c19581d471af0e9cb49d928172cd8492cd78a2b7a4e82345d33662929bb"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8c0e79820cdd67978e1120983786422d279e07a381dbf89d03bbb23ec670a6"}, - {file = "coverage-7.2.6-cp39-cp39-win32.whl", hash = "sha256:13cde6bb0e58fb67d09e2f373de3899d1d1e866c5a9ff05d93615f2f54fbd2bb"}, - {file = "coverage-7.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:6b9f64526286255735847aed0221b189486e0b9ed943446936e41b7e44b08783"}, - {file = "coverage-7.2.6-pp37.pp38.pp39-none-any.whl", hash = "sha256:6babcbf1e66e46052442f10833cfc4a0d3554d8276aa37af8531a83ed3c1a01d"}, - {file = "coverage-7.2.6.tar.gz", hash = "sha256:2025f913f2edb0272ef15d00b1f335ff8908c921c8eb2013536fcaf61f5a683d"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = 
"sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] @@ -236,13 +245,13 @@ files = [ [[package]] name = "editorconfig-checker" -version = "2.7.1" +version = "2.7.2" description = "Python wrapper around invoking editorconfig-checker (https://github.com/editorconfig-checker/editorconfig-checker)" category = "dev" optional = false python-versions = ">=2.7" files = [ - {file = "editorconfig-checker-2.7.1.tar.gz", hash = "sha256:fde558289133b8fc4d0bcb54729c0e8fa4c688873b685094c69916554d79a3f8"}, + {file = "editorconfig-checker-2.7.2.tar.gz", hash = "sha256:43598be8601d157bc9ad5e4304d96dcabfd905fe6fb801f01636bb03f1b71e85"}, ] [[package]] @@ -262,19 +271,19 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.12.0" +version = "3.12.2" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, - {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flatten-dict" @@ -309,14 +318,14 @@ license = ["ukkonen"] [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.7.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [package.dependencies] @@ -326,7 +335,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", 
"pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "iniconfig" @@ -443,22 +452,22 @@ files = [ [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.7.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.7.0-py3-none-any.whl", hash = "sha256:cfd065ba43133ff103ab3bd10aecb095c2a0035fcd1f07217c9376900d94ba07"}, + {file = "platformdirs-3.7.0.tar.gz", hash = "sha256:87fbf6473e87c078d536980ba970a472422e94f17b752cfad17024c18876d481"}, ] [package.dependencies] -typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=4.6.3", markers = "python_version < \"3.8\""} [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -501,48 +510,48 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "1.10.8" +version = "1.10.9" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = 
"pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-1.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca"}, + {file = "pydantic-1.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f"}, + {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896"}, + {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d"}, + {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f"}, + {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4"}, + {file = "pydantic-1.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f"}, + {file = "pydantic-1.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0"}, + {file = "pydantic-1.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7"}, + {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d"}, + {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c"}, + {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91"}, + {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8"}, + {file = "pydantic-1.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f"}, + {file = "pydantic-1.10.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece"}, + {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a"}, + {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a"}, + {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60"}, + {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf"}, + {file = "pydantic-1.10.9-cp37-cp37m-win_amd64.whl", hash = 
"sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29"}, + {file = "pydantic-1.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82"}, + {file = "pydantic-1.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6"}, + {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766"}, + {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3"}, + {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572"}, + {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"}, + {file = "pydantic-1.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb"}, + {file = "pydantic-1.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298"}, + {file = "pydantic-1.10.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276"}, + {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60"}, + {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc"}, + {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a"}, + {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4"}, + {file = "pydantic-1.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1"}, + {file = "pydantic-1.10.9-py3-none-any.whl", hash = "sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305"}, + {file = "pydantic-1.10.9.tar.gz", hash = "sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be"}, ] [package.dependencies] @@ -554,14 +563,14 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pytest" -version = "7.3.1" +version = "7.3.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.3.2-py3-none-any.whl", hash = "sha256:cdcbd012c9312258922f8cd3f1b62a6580fdced17db6014896053d47cddf9295"}, + {file = "pytest-7.3.2.tar.gz", hash = "sha256:ee990a3cc55ba808b80795a79944756f315c67c12b56abd3ac993a7b8c17030b"}, ] [package.dependencies] @@ -574,7 +583,7 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] 
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -677,14 +686,14 @@ files = [ [[package]] name = "setuptools" -version = "67.8.0" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] @@ -776,25 +785,25 @@ files = [ [[package]] name = "virtualenv" -version = "20.23.0" +version = "20.23.1" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, - {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, + {file = "virtualenv-20.23.1-py3-none-any.whl", hash = "sha256:34da10f14fea9be20e0fd7f04aba9732f84e593dac291b757ce42e3368a39419"}, + {file = "virtualenv-20.23.1.tar.gz", hash = "sha256:8ff19a38c1021c742148edc4f81cb43d7f8c6816d2ede2ab72af5b84c749ade1"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.11,<4" -importlib-metadata = {version = ">=6.4.1", markers = "python_version < \"3.8\""} -platformdirs = ">=3.2,<4" +filelock = ">=3.12,<4" +importlib-metadata = {version = ">=6.6", markers = "python_version < \"3.8\""} +platformdirs = ">=3.5.1,<4" [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] [[package]] name = "zipp" @@ -818,4 +827,4 @@ all = [] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "d450cf61506d2ac99333ad950484636d6cb0c439550c1a4cc4fba9e17d5511f4" +content-hash = "ceb8fc594b0ba5e9513124fadc93189e28e2fe12200ce6c9158e107dc13f1813" diff --git a/pyproject.toml b/pyproject.toml index dc61e8a7..20fc2125 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ flatten-dict = "^0.4" jsonlines = "^3.1" pydantic = "^1.10.8" arrow = "^1.2" -importlib-metadata = {version = "^6.6", python = 
"<3.8"} +importlib-metadata = {version = "^6.7", python = "<3.8"} omegaconf = "^2.3.0" dill = "^0.3.6" typing-extensions = {version = "^4.5.0", python = "<3.11"} @@ -40,7 +40,7 @@ pyupgrade = "^3.3" black = "^23.3.0" pytest = "^7.3" pre-commit = "^2.21.0" -editorconfig-checker = "^2.7.1" +editorconfig-checker = "^2.7.2" pytest-cov = "^4.1.0" [tool.poetry.extras] diff --git a/src/machinable/__init__.py b/src/machinable/__init__.py index 1f0cd123..dbc9821c 100644 --- a/src/machinable/__init__.py +++ b/src/machinable/__init__.py @@ -39,7 +39,7 @@ def get( module: Union[str, Element, None] = None, version: VersionType = None, - predicate: Optional[str] = get_settings().default_predicate, + predicate: Optional[str] = "$", **kwargs, ) -> Interface: return Interface.get(module, version, predicate, **kwargs) diff --git a/src/machinable/collection.py b/src/machinable/collection.py index b19bc984..a1a97918 100644 --- a/src/machinable/collection.py +++ b/src/machinable/collection.py @@ -9,7 +9,6 @@ from json import dumps from pprint import pprint -from machinable.settings import get_settings from machinable.types import VersionType long = int @@ -1384,7 +1383,7 @@ def filter_by_predicate( self, module: str, version: VersionType = None, - predicate: str = get_settings().default_predicate, + predicate: str = "$", **kwargs, ): from machinable import Element @@ -1407,7 +1406,7 @@ def singleton( self, module: str, version: VersionType = None, - predicate: str = get_settings().default_predicate, + predicate: str = "$", **kwargs, ) -> Union[Any, "Component"]: from machinable import Element @@ -1445,6 +1444,11 @@ def launch(self) -> "ComponentCollection": return self + +class ExecutionCollection(ElementCollection): + def __str__(self): + return f"Executions <{len(self.items)}>" + def status(self, status="started"): """Filters the collection by a status attribute @@ -1455,8 +1459,3 @@ def status(self, status="started"): return self.filter(lambda item: getattr(item, "is_" + status)()) except AttributeError as _ex: raise ValueError(f"Invalid status field: {status}") from _ex - - -class ExecutionCollection(ElementCollection): - def __str__(self): - return f"Executions <{len(self.items)}>" diff --git a/src/machinable/component.py b/src/machinable/component.py index 08d3e29c..8615e56f 100644 --- a/src/machinable/component.py +++ b/src/machinable/component.py @@ -1,10 +1,10 @@ from typing import TYPE_CHECKING, List, Optional, Union +import os import random import sys import threading -import arrow from machinable.settings import get_settings if sys.version_info >= (3, 11): @@ -12,22 +12,18 @@ else: from typing_extensions import Self -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - from typing import Dict from machinable import errors, schema from machinable.collection import ComponentCollection, ExecutionCollection from machinable.element import _CONNECTIONS as connected_elements -from machinable.element import Element, get_dump, get_lineage +from machinable.element import get_dump, get_lineage +from machinable.index import Index from machinable.interface import Interface, belongs_to, belongs_to_many from machinable.project import Project from machinable.storage import Storage -from machinable.types import DatetimeType, TimestampType, VersionType -from machinable.utils import generate_seed, load_file, save_file +from machinable.types import VersionType +from machinable.utils import generate_seed if TYPE_CHECKING: from machinable.execution import Execution 
@@ -56,6 +52,15 @@ def __init__( lineage=get_lineage(self), ) self.__model__._dump = get_dump(self) + self._current_execution_context = None + + @property + def current_execution_context(self) -> "Execution": + if self._current_execution_context is None: + from machinable.execution import Execution + + self._current_execution_context = Execution.get() + return self._current_execution_context @belongs_to_many(key="execution_history") def executions() -> ExecutionCollection: @@ -63,48 +68,66 @@ def executions() -> ExecutionCollection: return Execution - @belongs_to(key="execution_history", cached=False) - def execution() -> "Execution": + @property + def execution(self) -> "Execution": from machinable.execution import Execution - return Execution + related = None + if self.is_mounted(): + # if mounted, search for related, most recent execution + related = Index.get().find_related( + relation="Execution.Component.execution_history", + uuid=self.uuid, + inverse=True, + ) - @property - def seed(self) -> int: - return self.__model__.seed + if related is not None and len(related) > 0: + related = Interface.find( + sorted(related, key=lambda x: x.timestamp, reverse=True)[ + 0 + ].uuid + ) + else: + related = None - @property - def nickname(self) -> str: - return self.__model__.nickname + # use context if no related execution was found + if related is None: + if Execution.is_connected(): + related = Execution.get() + else: + related = self.current_execution_context + + related.of(self) + + return related def launch(self) -> Self: from machinable.execution import Execution - execution = Execution.get() - - execution.add(self) + self.execution.add(self) if Execution.is_connected(): # commit only, defer execution + Execution.get().add(self) self.commit() else: - execution.dispatch() + self.current_execution_context.add(self) + self.current_execution_context.dispatch() return self + @property + def seed(self) -> int: + return self.__model__.seed + + @property + def nickname(self) -> str: + return self.__model__.nickname + @classmethod def collect(cls, components) -> "ComponentCollection": return ComponentCollection(components) - def resources( - self, execution: Optional["Execution"] = None - ) -> Optional[Dict]: - if execution is None: - execution = self.execution - if execution is None: - return None - return self.load_file(f"resources/{execution.id}.json", None) - def dispatch(self) -> Self: """Dispatch the component lifecycle""" writes_meta_data = ( @@ -117,9 +140,13 @@ def dispatch(self) -> Self: # meta-data if writes_meta_data: - self.update_status("started") - self.save_file( - "host.json", + if not self.execution.is_started(): + self.execution.update_status(status="started") + else: + self.execution.update_status(status="resumed") + + self.execution.save_file( + [self.id, "host.json"], data=Project.get().provider().get_host_info(), ) @@ -129,7 +156,7 @@ def beat(): t.start() self.on_heartbeat() if self.on_write_meta_data() is not False and self.is_mounted(): - self.update_status("heartbeat") + self.execution.update_status(status="heartbeat") return t heartbeat = beat() @@ -143,7 +170,8 @@ def beat(): heartbeat.cancel() if writes_meta_data: - self.update_status("finished") + self.execution.update_status(status="finished") + self.cached(True, reason="finished") self.on_after_dispatch(success=True) except BaseException as _ex: # pylint: disable=broad-except @@ -159,12 +187,18 @@ def beat(): for storage in Storage.connected(): storage.update(self) - @property - def host_info(self) -> 
Optional[Dict]: - return self.load_file("host.json", None) + def cached( + self, cached: Optional[bool] = None, reason: str = "user" + ) -> bool: + if cached is None: + return self.load_file("cached", None) is not None + elif cached is True: + self.save_file("cached", str(reason)) + return True + elif cached is False: + os.remove(self.local_directory("cached"), ignore_errors=True) - def cached(self) -> bool: - return self.is_finished() + return cached def dispatch_code(self, inline: bool = True) -> Optional[str]: connections = [f"Project('{Project.get().path()}').__enter__()"] @@ -188,143 +222,11 @@ def dispatch_code(self, inline: bool = True) -> Optional[str]: return code.replace(" ", "")[1:-1] - def output(self, incremental: bool = False) -> Optional[str]: - """Returns the output log""" - if not self.is_mounted(): - return None - if incremental: - read_length = self._cache.get("output_read_length", 0) - if read_length == -1: - return "" - output = self.load_file("output.log", None) - if output is None: - return None - - if self.is_finished(): - self._cache["output_read_length"] = -1 - else: - self._cache["output_read_length"] = len(output) - return output[read_length:] - - if "output" in self._cache: - return self._cache["output"] - - output = self.load_file("output.log", None) - - if self.is_finished(): - self._cache["output"] = output - - return output - - def update_status( - self, - status: Literal["started", "heartbeat", "finished"] = "heartbeat", - timestamp: Optional[TimestampType] = None, - ) -> None: - if timestamp is None: - timestamp = arrow.now() - if isinstance(timestamp, arrow.Arrow): - timestamp = arrow.get(timestamp) - - if status == "started": - save_file( - self.local_directory("started_at"), - str(timestamp) + "\n", - # starting event can occur multiple times - mode="a", - ) - elif status == "heartbeat": - save_file( - self.local_directory("heartbeat_at"), - str(timestamp), - mode="w", - ) - elif status == "finished": - save_file( - self.local_directory("finished_at"), - str(timestamp), - mode="w", - ) - else: - raise ValueError( - f"Invalid status {status}; must be one of 'started', 'heartbeat', 'finished'" - ) - - def created_at(self) -> Optional[DatetimeType]: - if self.timestamp is None: - return None - - return arrow.get(self.timestamp) - - def started_at(self) -> Optional[DatetimeType]: - """Returns the starting time""" - if not self.is_mounted(): - return None - return self._retrieve_status("started") - - def heartbeat_at(self): - """Returns the last heartbeat time""" - if not self.is_mounted(): - return None - return self._retrieve_status("heartbeat") - - def finished_at(self): - """Returns the finishing time""" - if not self.is_mounted(): - return None - return self._retrieve_status("finished") - - def _retrieve_status(self, field: str) -> Optional[DatetimeType]: - fields = ["started", "heartbeat", "finished"] - if field not in fields: - raise ValueError(f"Invalid field: {field}. 
Must be on of {fields}") - status = load_file(self.local_directory(f"{field}_at"), default=None) - if status is None: - return None - if field == "started": - # can have multiple rows, return latest - status = status.strip("\n").split("\n")[-1] - - try: - return arrow.get(status) - except arrow.ParserError: - return None - - def is_finished(self): - """True if finishing time has been written""" - return bool(self.finished_at()) - - def is_started(self): - """True if starting time has been written""" - return bool(self.started_at()) - - def is_active(self): - """True if not finished and last heartbeat occurred less than 30 seconds ago""" - if not self.heartbeat_at(): - return False - - return (not self.is_finished()) and ( - (arrow.now() - self.heartbeat_at()).seconds < 30 - ) - - def is_live(self): - """True if active or finished""" - return self.is_finished() or self.is_active() - - def is_incomplete(self): - """Shorthand for is_started() and not (is_active() or is_finished())""" - return self.is_started() and not ( - self.is_active() or self.is_finished() - ) - # life cycle def __call__(self) -> None: ... - def on_before_commit(self) -> Optional[bool]: - """Event triggered before the commit of the component""" - def on_before_dispatch(self) -> Optional[bool]: """Event triggered before the dispatch of the component""" diff --git a/src/machinable/element.py b/src/machinable/element.py index 99fbde3e..3ccccd52 100644 --- a/src/machinable/element.py +++ b/src/machinable/element.py @@ -3,8 +3,6 @@ import collections import copy import json -import os -import stat import sys if sys.version_info >= (3, 11): @@ -16,13 +14,11 @@ import dill as pickle import machinable import omegaconf -import pydantic from machinable import schema from machinable.collection import ElementCollection from machinable.config import from_element, match_method, rewrite_config_methods from machinable.errors import ConfigurationError, MachinableError from machinable.mixin import Mixin -from machinable.settings import get_settings from machinable.types import DatetimeType, ElementType, VersionType from machinable.utils import Jsonable, sentinel, unflatten_dict, update_dict from omegaconf import DictConfig, OmegaConf @@ -218,7 +214,19 @@ def uuid_to_id(uuid: str) -> str: return result -def resolve_custom_predicate(predicate: str, element: "Element"): +def resolve_custom_predicate( + predicate: Optional[str], element: "Element" +) -> Optional[List[str]]: + # predicate may look like this "example,test,*" + # where * represents a placeholder for all the custom predicates + # that are marked with a trailing * (i.e. 
default predicates) + # $ is a shorthand for inferring the predicate from the element + if predicate == "$": + predicate = element.default_predicate + + if predicate is None: + return None + from machinable.project import Project custom = element.on_compute_predicate() or {} @@ -262,6 +270,7 @@ class Element(Mixin, Jsonable): kind: Optional[str] = "Element" default: Optional["Element"] = None + default_predicate: Optional[str] = "config,*" _module_: Optional[str] = None def __init__(self, version: VersionType = None): @@ -297,7 +306,7 @@ def id(self) -> str: def timestamp(self) -> float: return self.__model__.timestamp - def timestamp_at(self) -> DatetimeType: + def created_at(self) -> DatetimeType: return arrow.get(self.__model__.timestamp) def version( @@ -346,7 +355,7 @@ def get( cls, module: Union[str, "Element", None] = None, version: VersionType = None, - predicate: Optional[str] = get_settings().default_predicate, + predicate: Optional[str] = "$", **kwargs, ) -> "Element": if module is None and version is None: @@ -400,7 +409,7 @@ def singleton( cls, module: Union[str, "Element"], version: VersionType = None, - predicate: Optional[str] = get_settings().default_predicate, + predicate: Optional[str] = "$", **kwargs, ) -> "Element": # no-op as elements do not have a storage representation @@ -569,7 +578,11 @@ def model(cls, element: Optional[Any] = None) -> schema.Element: return getattr(schema, cls.kind) def matches(self, element: "Element", predicate: str) -> bool: - for p in resolve_custom_predicate(predicate, element): + predicate_fields = resolve_custom_predicate(predicate, element) + if predicate_fields is None: + return False + + for p in predicate_fields: if not equalversion(self.predicate[p], element.predicate[p]): return False @@ -675,7 +688,7 @@ def __repr__(self): return f"{self.kind} [{self.id}]" def __str__(self): - return self.__repr__() + return self.id def __eq__(self, other): return self.uuid == other.uuid diff --git a/src/machinable/execution.py b/src/machinable/execution.py index 01185de2..0b4220d5 100644 --- a/src/machinable/execution.py +++ b/src/machinable/execution.py @@ -1,29 +1,54 @@ from typing import Any, Dict, List, Optional, Union import copy +import os import sys +import time if sys.version_info >= (3, 11): from typing import Self else: from typing_extensions import Self +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +import arrow from machinable import schema -from machinable.collection import ComponentCollection +from machinable.collection import ComponentCollection, ExecutionCollection from machinable.component import Component from machinable.element import extract, get_dump, get_lineage from machinable.errors import ExecutionFailed from machinable.interface import Interface, has_many, has_one -from machinable.project import Project from machinable.schedule import Schedule from machinable.settings import get_settings -from machinable.types import ElementType, VersionType -from machinable.utils import sentinel, update_dict +from machinable.types import ( + DatetimeType, + ElementType, + TimestampType, + VersionType, +) +from machinable.utils import save_file, update_dict + +_allowed_status = ["started", "heartbeat", "finished", "resumed"] + + +def _assert_allowed(status: str): + if status not in _allowed_status: + raise ValueError( + f"Invalid status '{status}'; must be one of {_allowed_status}" + ) + + +StatusType = Literal["started", "heartbeat", "finished", "resumed"] class 
Execution(Interface): kind = "Execution" default = get_settings().default_execution + default_predicate: Optional[str] = None def __init__( self, @@ -48,6 +73,11 @@ def __init__( schedule = Schedule.make(*extract(schedule)) self.push_related("schedule", schedule) self._starred_predicates = {"resources": None} + self._executable_ = None + + @classmethod + def collect(cls, executions) -> "ExecutionCollection": + return ExecutionCollection(executions) def compute_predicate(self) -> Dict: predicates = super().compute_predicate() @@ -62,6 +92,27 @@ def schedule() -> "Schedule": def executables() -> ComponentCollection: return Component + def executable( + self, executable: Optional[Component] = None + ) -> Optional[Component]: + if self._executable_ is not None: + return self._executable_ + + # auto-select executable if only one is available + if len(self.executables) == 1: + return self.executables[0] + + if executable is not None: + return executable + + raise ValueError( + "No executable selected. Call `execution.of(executable)` first, or pass an executable argument." + ) + + def of(self, executable: Union[None, Component]) -> Self: + self._executable_ = executable + return self + @property def pending_executables(self) -> ComponentCollection: return self.executables.filter(lambda e: not e.cached()) @@ -69,17 +120,13 @@ def pending_executables(self) -> ComponentCollection: def add( self, executable: Union[Component, List[Component]], - once: bool = False, ) -> Self: if isinstance(executable, (list, tuple)): for _executable in executable: self.add(_executable) return self - if once and self.__related__["executables"].contains( - lambda x: x == executable - ): - # already added + if self.executables.contains(lambda x: x == executable): return self self.push_related("executables", executable) @@ -92,22 +139,35 @@ def commit(self) -> Self: return super().commit() - def resources(self, resources: Dict = sentinel) -> Optional[Dict]: - if resources is sentinel: - return self.__model__.resources + def resources(self, executable: Optional["Component"] = None) -> Dict: + try: + executable = self.executable(executable) + except ValueError: + pass + + resources = {} + default = copy.deepcopy(self.__model__.resources) + + if executable is None: + resources = default + else: + resources = self.load_file( + [executable, "resources.json"], default={} + ) - self.__model__.resources = resources + resources["_default_"] = default - return self.__model__.resources + return resources def canonicalize_resources(self, resources: Dict) -> Dict: return resources - def default_resources(self, executable: "Component") -> Optional[dict]: - """Default resources""" - - def compute_resources(self, executable: "Component") -> Dict: - default_resources = self.default_resources(executable) + def compute_resources( + self, executable: Optional["Component"] = None + ) -> Dict: + default_resources = self.on_compute_default_resources( + self.executable(executable) + ) if not self.__model__.resources and default_resources is not None: return self.canonicalize_resources(default_resources) @@ -163,8 +223,8 @@ def dispatch(self) -> Self: try: # compute resources for executable in self.pending_executables: - executable.save_file( - f"resources/{self.id}.json", + self.save_file( + [executable, "resources.json"], self.compute_resources(executable), ) self.__call__() @@ -178,6 +238,204 @@ def __call__(self) -> None: for executable in self.pending_executables: executable.dispatch() + @property + def host_info( + self, + executable: 
Optional["Component"] = None, + ) -> Optional[Dict]: + executable = self.executable(executable) + return self.load_file([executable.id, "host.json"], None) + + def output( + self, + executable: Optional["Component"] = None, + incremental: bool = False, + ) -> Optional[str]: + """Returns the output log""" + if not self.is_mounted(): + return None + + executable = self.executable(executable) + + p = os.path.join(executable.id, "output.log") + + if incremental: + read_length = self._cache.get(f"{p}.read_length", 0) + if read_length == -1: + return "" + output = self.load_file(p, None) + if output is None: + return None + + if self.is_finished(executable): + self._cache[f"{p}.read_length"] = -1 + else: + self._cache[f"{p}.read_length"] = len(output) + return output[read_length:] + + if p in self._cache: + return self._cache[p] + + output = self.load_file(p, None) + + if self.is_finished(executable): + self._cache[p] = output + + return output + + def stream_output( + self, + executable: Optional["Component"] = None, + refresh_every: Union[int, float] = 1, + stream=print, + ): + executable = self.executable(executable) + while not self.is_started(executable) or self.is_active(executable): + output = self.output(executable, incremental=True) + if output: + stream(output) + time.sleep(refresh_every) + + def update_status( + self, + executable: Optional["Component"] = None, + status: StatusType = "heartbeat", + timestamp: Optional[TimestampType] = None, + ) -> TimestampType: + _assert_allowed(status) + executable = self.executable(executable) + + if timestamp is None: + timestamp = arrow.now() + if isinstance(timestamp, arrow.Arrow): + timestamp = arrow.get(timestamp) + + # resumed event can occur multiple times + multiple = status == "resumed" + + save_file( + self.local_directory(executable.id, f"{status}_at"), + str(timestamp) + ("\n" if multiple else ""), + mode="a" if multiple else "w", + ) + + return timestamp + + def retrieve_status( + self, + executable: Optional["Component"] = None, + status: StatusType = "heartbeat", + ) -> Optional[DatetimeType]: + _assert_allowed(status) + executable = self.executable(executable) + + status = self.load_file([executable, f"{status}_at"], default=None) + if status is None: + return None + + multiple = status == "resumed" + + if multiple: + # can have multiple rows, return latest + status = status.strip("\n").split("\n")[-1] + + try: + return arrow.get(status) + except arrow.ParserError: + return None + + def started_at( + self, executable: Optional["Component"] = None + ) -> Optional[DatetimeType]: + """Returns the starting time""" + if not self.is_mounted(): + return None + return self.retrieve_status(self.executable(executable), "started") + + def resumed_at( + self, + executable: Optional["Component"] = None, + ) -> Optional[DatetimeType]: + """Returns the resumed time""" + if not self.is_mounted(): + return None + return self.retrieve_status(self.executable(executable), "resumed") + + def heartbeat_at(self, executable: Optional["Component"] = None): + """Returns the last heartbeat time""" + if not self.is_mounted(): + return None + return self.retrieve_status(self.executable(executable), "heartbeat") + + def finished_at( + self, + executable: Optional["Component"] = None, + ): + """Returns the finishing time""" + if not self.is_mounted(): + return None + return self.retrieve_status(self.executable(executable), "finished") + + def is_finished( + self, + executable: Optional["Component"] = None, + ): + """True if finishing time has been written""" + 
return bool(self.finished_at(self.executable(executable))) + + def is_started( + self, + executable: Optional["Component"] = None, + ): + """True if starting time has been written""" + return bool( + self.started_at( + self.executable(executable), + ) + ) + + def is_resumed( + self, + executable: Optional["Component"] = None, + ): + """True if resumed time has been written""" + return bool( + self.resumed_at( + self.executable(executable), + ) + ) + + def is_active( + self, + executable: Optional["Component"] = None, + ): + """True if not finished and last heartbeat occurred less than 30 seconds ago""" + executable = self.executable(executable) + if not self.heartbeat_at( + executable, + ): + return False + + return (not self.is_finished(executable)) and ( + (arrow.now() - self.heartbeat_at(executable)).seconds < 30 + ) + + def is_live( + self, + executable: Optional["Component"] = None, + ): + """True if active or finished""" + executable = self.executable(executable) + return self.is_finished(executable) or self.is_active(executable) + + def is_incomplete( + self, + executable: Optional["Component"] = None, + ): + """Shorthand for is_started() and not is_live()""" + executable = self.executable(executable) + return self.is_started(executable) and not self.is_live(executable) + def on_verify_schedule(self) -> bool: """Event to verify compatibility of the schedule""" if self.schedule is None: @@ -191,12 +449,18 @@ def on_before_dispatch(self) -> Optional[bool]: Return False to prevent the dispatch """ - def on_before_commit(self) -> Optional[bool]: - """Event triggered before commit of an execution""" - def on_after_dispatch(self) -> None: """Event triggered after the dispatch of an execution""" + def on_compute_default_resources( + self, executable: "Component" + ) -> Optional[Dict]: + """Event triggered to compute default resources""" + + @property + def seed(self) -> int: + return self.__model__.seed + def __iter__(self): yield from self.executables diff --git a/src/machinable/index.py b/src/machinable/index.py index 1e186ebe..a954537c 100644 --- a/src/machinable/index.py +++ b/src/machinable/index.py @@ -2,6 +2,7 @@ import json import os +from contextlib import contextmanager from datetime import datetime try: @@ -87,6 +88,16 @@ def load(database: str, create=False) -> sqlite3.Connection: return db +@contextmanager +def db(database: str, create=False) -> Optional[sqlite3.Connection]: + try: + database = load(database, create) + yield database + database.close() + except FileNotFoundError: + yield None + + class Index(Interface): kind = "Index" default = get_settings().default_index @@ -123,35 +134,36 @@ def local_directory(self, uuid: str, *append: str) -> str: return os.path.join(self.config.directory, uuid, *append) def commit(self, model: schema.Interface) -> bool: - cur = self.db.cursor() - if cur.execute( - """SELECT uuid FROM 'index' WHERE uuid=?""", (model.uuid,) - ).fetchone(): - # already exists - return False - cur.execute( - """INSERT INTO 'index' ( - uuid, - kind, - module, - config, - version, - predicate, - lineage, - 'timestamp' - ) VALUES (?,?,?,?,?,?,?,?)""", - ( - model.uuid, - model.kind, - model.module, - _jn(model.config), - _jn(model.version), - _jn(model.predicate), - _jn(model.lineage), - model.timestamp, - ), - ) - self.db.commit() + with db(self.config.database, create=True) as _db: + cur = _db.cursor() + if cur.execute( + """SELECT uuid FROM 'index' WHERE uuid=?""", (model.uuid,) + ).fetchone(): + # already exists + return False + cur.execute( + """INSERT 
INTO 'index' ( + uuid, + kind, + module, + config, + version, + predicate, + lineage, + 'timestamp' + ) VALUES (?,?,?,?,?,?,?,?)""", + ( + model.uuid, + model.kind, + model.module, + _jn(model.config), + _jn(model.version), + _jn(model.predicate), + _jn(model.lineage), + model.timestamp, + ), + ) + _db.commit() return True def create_relation( @@ -166,79 +178,90 @@ def create_relation( for r in related_uuid: self.create_relation(relation, uuid, r, priority, timestamp) return - - cur = self.db.cursor() - if cur.execute( - """SELECT id FROM 'relations' WHERE uuid=? AND related_uuid=? AND relation=?""", - (uuid, related_uuid, relation), - ).fetchone(): - # already exists - return - if timestamp is None: - timestamp = datetime.now().timestamp() - cur.execute( - """INSERT INTO 'relations' ( - relation, - uuid, - related_uuid, - priority, - timestamp - ) VALUES (?,?,?,?,?)""", - (relation, uuid, related_uuid, priority, timestamp), - ) - self.db.commit() + with db(self.config.database, create=True) as _db: + cur = _db.cursor() + if cur.execute( + """SELECT id FROM 'relations' WHERE uuid=? AND related_uuid=? AND relation=?""", + (uuid, related_uuid, relation), + ).fetchone(): + # already exists + return + if timestamp is None: + timestamp = datetime.now().timestamp() + cur.execute( + """INSERT INTO 'relations' ( + relation, + uuid, + related_uuid, + priority, + timestamp + ) VALUES (?,?,?,?,?)""", + (relation, uuid, related_uuid, priority, timestamp), + ) + _db.commit() def find(self, uuid: str) -> Optional[schema.Interface]: - cur = self.db.cursor() - row = cur.execute( - """SELECT * FROM 'index' WHERE uuid=?""", (uuid,) - ).fetchone() - if row is None: - return None - return interface_row_factory(cur, row) + with db(self.config.database, create=False) as _db: + if not _db: + return None + cur = _db.cursor() + row = cur.execute( + """SELECT * FROM 'index' WHERE uuid=?""", (uuid,) + ).fetchone() + if row is None: + return None + return interface_row_factory(cur, row) def find_by_predicate( self, module: str, predicate: Optional[Dict] = None ) -> List[schema.Interface]: - cur = self.db.cursor() - if predicate: - keys = ["module=?"] - values = [module] - for p, v in predicate.items(): - keys.append(f"json_extract(predicate, '$.{p}')=?") - values.append(v if isinstance(v, (str, int, float)) else _jn(v)) - query = cur.execute( - """SELECT * FROM 'index' WHERE """ + (" AND ".join(keys)), - values, - ) - else: - query = cur.execute( - """SELECT * FROM 'index' WHERE module=?""", - (module,), - ) + with db(self.config.database, create=False) as _db: + if not _db: + return [] + cur = _db.cursor() + if predicate: + keys = ["module=?"] + values = [module] + for p, v in predicate.items(): + keys.append(f"json_extract(predicate, '$.{p}')=?") + values.append( + v if isinstance(v, (str, int, float)) else _jn(v) + ) + query = cur.execute( + """SELECT * FROM 'index' WHERE """ + (" AND ".join(keys)), + values, + ) + else: + query = cur.execute( + """SELECT * FROM 'index' WHERE module=?""", + (module,), + ) - return [interface_row_factory(cur, row) for row in query.fetchall()] + return [interface_row_factory(cur, row) for row in query.fetchall()] def find_related( self, relation: str, uuid: str, inverse: bool = False ) -> Union[None, List[schema.Interface]]: - cur = self.db.cursor() - if not inverse: - rows = cur.execute( - """SELECT * FROM 'index' WHERE uuid IN - ( - SELECT related_uuid FROM relations WHERE uuid=? AND relation=? 
- ) ORDER BY 'timestamp' ASC - """, - (uuid, relation), - ).fetchall() - else: - rows = cur.execute( - """SELECT * FROM 'index' WHERE uuid IN - ( - SELECT uuid FROM relations WHERE related_uuid=? AND relation=? ORDER BY 'timestamp' DESC - ) ORDER BY 'timestamp' ASC - """, - (uuid, relation), - ).fetchall() - return [interface_row_factory(cur, row) for row in rows or []] + with db(self.config.database, create=False) as _db: + if not _db: + return None + cur = _db.cursor() + if not inverse: + rows = cur.execute( + """SELECT * FROM 'index' WHERE uuid IN + ( + SELECT related_uuid FROM relations WHERE uuid=? AND relation=? + ) ORDER BY 'timestamp' DESC + """, + (uuid, relation), + ).fetchall() + else: + rows = cur.execute( + """SELECT * FROM 'index' WHERE uuid IN + ( + SELECT uuid FROM relations WHERE related_uuid=? AND relation=? + ) ORDER BY 'timestamp' DESC + """, + (uuid, relation), + ).fetchall() + return [interface_row_factory(cur, row) for row in rows or []] diff --git a/src/machinable/interface.py b/src/machinable/interface.py index 9a41e01c..60b4f663 100644 --- a/src/machinable/interface.py +++ b/src/machinable/interface.py @@ -15,7 +15,7 @@ import os from functools import partial, wraps -from machinable import schema +from machinable import errors, schema from machinable.collection import Collection, InterfaceCollection from machinable.element import ( Element, @@ -25,7 +25,12 @@ ) from machinable.settings import get_settings from machinable.types import VersionType -from machinable.utils import is_directory_version, load_file, save_file +from machinable.utils import ( + is_directory_version, + joinpath, + load_file, + save_file, +) from omegaconf import OmegaConf @@ -68,6 +73,8 @@ def collect(self, elements: List["Interface"]) -> Collection: def __set_name__(self, cls, name): self.cls = cls + if cls.__relations__ is None: + cls.__relations__ = {} cls.__relations__[name] = self def __get__(self, instance, owner): @@ -140,7 +147,7 @@ class Interface(Element): # class level relationship information # note that the actual data is kept # in the __related__ object propery - __relations__: Dict[str, Relation] = {} + __relations__: Optional[Dict[str, Relation]] = None def __init__( self, @@ -161,14 +168,23 @@ def __init__( # initialize relation data self.__related__ = {} self._relation_cache = {} + if self.__relations__ is None: + self.__relations__ = {} for name, relation in self.__relations__.items(): if relation.multiple: self.__related__[name] = relation.collect([]) else: self.__related__[name] = None if uses: - self.use(uses) - self.push_related("ancestor", derived_from) + if not isinstance(uses, (list, tuple)): + uses = [uses] + for use in uses: + self.__related__["uses"].append(use) + self._relation_cache["uses"] = True + + if derived_from: + self.__related__["ancestor"] = derived_from + self._relation_cache["ancestor"] = True self._deferred_data = {} @@ -177,13 +193,21 @@ def collect(cls, elements) -> InterfaceCollection: return InterfaceCollection(elements) def push_related(self, key: str, value: "Interface") -> None: - # todo: check for editablility + if self.is_committed(): + raise errors.MachinableError( + f"{repr(self)} already exists and cannot be modified." 
+ ) if self.__relations__[key].multiple: self.__related__[key].append(value) else: self.__related__[key] = value self._relation_cache[key] = True + def is_committed(self) -> bool: + from machinable.index import Index + + return Index.get().find(self.uuid) is not None + def commit(self) -> Self: from machinable.index import Index @@ -260,23 +284,11 @@ def to_cli(self) -> str: return " ".join(cli) - def use(self, use: Union[Element, List[Element]]) -> Self: - # todo: check for editablility - - if isinstance(use, (list, tuple)): - for _use in use: - self.use(_use) - return self - - self.push_related("uses", use) - - return self - def derive( self, module: Union[str, Element, None] = None, version: VersionType = None, - predicate: Optional[str] = get_settings().default_predicate, + predicate: Optional[str] = "$", **kwargs, ) -> Self: if module is None or predicate is None: @@ -291,7 +303,7 @@ def singleton( cls, module: Union[str, "Element"], version: VersionType = None, - predicate: Optional[str] = get_settings().default_predicate, + predicate: Optional[str] = "$", **kwargs, ) -> "Collection": if module in [ @@ -352,7 +364,7 @@ def find_by_predicate( cls, module: Union[str, "Element"], version: VersionType = None, - predicate: Optional[str] = get_settings().default_predicate, + predicate: Optional[str] = "$", **kwargs, ) -> "Collection": from machinable.index import Index @@ -362,15 +374,16 @@ def find_by_predicate( except ModuleNotFoundError: return cls.collect([]) - if predicate: - predicate = OmegaConf.to_container( - OmegaConf.create( - { - p: candidate.predicate[p] - for p in resolve_custom_predicate(predicate, candidate) - } - ) + predicate_fields = resolve_custom_predicate(predicate, candidate) + + if predicate_fields is None: + return cls.collect([]) + + predicate_data = OmegaConf.to_container( + OmegaConf.create( + {p: candidate.predicate[p] for p in predicate_fields} ) + ) return cls.collect( [ @@ -379,7 +392,7 @@ def find_by_predicate( module if isinstance(module, str) else f"__session__{module.__name__}", - predicate, + predicate_data, ) ] ) @@ -391,7 +404,7 @@ def from_directory(cls, directory: str) -> "Element": Note that this does not verify the integrity of the directory. In particular, the interface may be missing or not be indexed. 
""" - data = load_file(os.path.join(directory, "model.json")) + data = load_file([directory, "model.json"]) model = getattr(schema, data["kind"], None) if model is None: @@ -400,22 +413,22 @@ def from_directory(cls, directory: str) -> "Element": interface = model(**data) if interface.module.startswith("__session__"): - interface._dump = load_file(os.path.join(directory, "dump.p"), None) + interface._dump = load_file([directory, "dump.p"], None) return cls.from_model(interface) def to_directory(self, directory: str, relations=True) -> Self: - save_file(os.path.join(directory, ".machinable"), self.__model__.uuid) - save_file(os.path.join(directory, "model.json"), self.__model__) + save_file([directory, ".machinable"], self.__model__.uuid) + save_file([directory, "model.json"], self.__model__) if self.__model__._dump is not None: - save_file(os.path.join(directory, "dump.p"), self.__model__._dump) + save_file([directory, "dump.p"], self.__model__._dump) if relations: for k, v in self.__related__.items(): if hasattr(v, "uuid"): - save_file(os.path.join(directory, "related", k), v.uuid) + save_file([directory, "related", k], v.uuid) elif v: save_file( - os.path.join(directory, "related", k), + [directory, "related", k], "\n".join([i.uuid for i in v]), mode="w", ) @@ -432,7 +445,10 @@ def local_directory(self, *append: str, create: bool = False) -> str: return directory - def load_file(self, filepath: str, default=None) -> Optional[Any]: + def load_file( + self, filepath: Union[str, List[str]], default=None + ) -> Optional[Any]: + filepath = joinpath(filepath) if not self.is_mounted(): # has write been deferred? if filepath in self._deferred_data: @@ -444,7 +460,9 @@ def load_file(self, filepath: str, default=None) -> Optional[Any]: return data if data is not None else default - def save_file(self, filepath: str, data: Any) -> str: + def save_file(self, filepath: Union[str, List[str]], data: Any) -> str: + filepath = joinpath(filepath) + if os.path.isabs(filepath): raise ValueError("Filepath must be relative") diff --git a/src/machinable/project.py b/src/machinable/project.py index 716a2bea..7981eace 100644 --- a/src/machinable/project.py +++ b/src/machinable/project.py @@ -296,9 +296,6 @@ def element( **constructor_kwargs, ) - if isinstance(element, Interface): - element.push_related("project", self) - return element def get_diff(self) -> Union[str, None]: diff --git a/src/machinable/schema.py b/src/machinable/schema.py index 4053ae75..520e6d72 100644 --- a/src/machinable/schema.py +++ b/src/machinable/schema.py @@ -43,6 +43,7 @@ class Project(Interface): class Execution(Interface): kind: str = "Execution" + seed: int = Field(default_factory=generate_seed) resources: Optional[Dict] = None diff --git a/src/machinable/settings.py b/src/machinable/settings.py index d2de0411..e119d4bd 100644 --- a/src/machinable/settings.py +++ b/src/machinable/settings.py @@ -9,7 +9,6 @@ class Settings(BaseModel): - default_predicate: Optional[str] = "config,*" default_execution: Optional[ElementType] = None default_component: Optional[ElementType] = None default_interface: Optional[ElementType] = None diff --git a/src/machinable/utils.py b/src/machinable/utils.py index 587244b0..96fb63cd 100644 --- a/src/machinable/utils.py +++ b/src/machinable/utils.py @@ -99,6 +99,13 @@ def is_directory_version(version: VersionType) -> bool: return False +def joinpath(filepath: Union[str, List[str]]) -> str: + if isinstance(filepath, str): + return filepath + + return os.path.join(*[str(p) for p in filepath if p is not None]) + 
+ def random_str(length: int, random_state=None): if random_state is None or isinstance(random_state, int): random_state = random.Random(random_state) @@ -213,7 +220,7 @@ def generate_nickname(categories=None, glue="_"): def load_file( - filepath: str, + filepath: Union[str, List[str]], default: Any = sentinel, opener=open, **opener_kwargs, @@ -228,6 +235,7 @@ def load_file( opener: Customer file opener opener_kwargs: Optional arguments to pass to the opener """ + filepath = joinpath(filepath) _, ext = os.path.splitext(filepath) mode = opener_kwargs.pop("mode", "r") try: @@ -262,7 +270,7 @@ def load_file( def save_file( - filepath: str, + filepath: Union[str, List[str]], data: Any, makedirs: Union[bool, Callable] = True, opener=open, @@ -281,6 +289,7 @@ def save_file( Returns the absolute path to the written file """ + filepath = joinpath(filepath) path = os.path.dirname(filepath) name = os.path.basename(filepath) _, ext = os.path.splitext(name) diff --git a/tests/samples/project/count.py b/tests/samples/project/count.py new file mode 100644 index 00000000..79299f13 --- /dev/null +++ b/tests/samples/project/count.py @@ -0,0 +1,10 @@ +from machinable import Component + + +class Counter(Component): + def __call__(self): + self.save_file("count", self.count + 1) + + @property + def count(self): + return int(self.load_file("count", 0)) diff --git a/tests/samples/project/fail.py b/tests/samples/project/fail.py new file mode 100644 index 00000000..1e6cbb6a --- /dev/null +++ b/tests/samples/project/fail.py @@ -0,0 +1,7 @@ +from machinable import Component + + +class Fail(Component): + def __call__(self): + if not self.load_file("repaired", False): + raise Exception("Fail") diff --git a/tests/samples/project/interface/events_check.py b/tests/samples/project/interface/events_check.py index 246c0537..479d1b0e 100644 --- a/tests/samples/project/interface/events_check.py +++ b/tests/samples/project/interface/events_check.py @@ -16,17 +16,17 @@ def on_seeding(self): return False def on_success(self): - assert self.is_started() + assert self.execution.is_started() self.events.append("on_success") def __call__(self) -> None: - assert self.is_active() + assert self.execution.is_active() self.events.append("on_call") def on_after_dispatch(self, success): self.events.append("on_after_dispatch") self.save_file("events.json", self.events) - assert self.is_finished() + assert self.execution.is_finished() def on_failure(self, exception: errors.MachinableError): assert False diff --git a/tests/test_collection.py b/tests/test_collection.py index 3ae1223d..acc4be9c 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -6,36 +6,28 @@ from machinable.collection import Collection, ComponentCollection, collect from machinable.component import Component +from machinable.execution import Execution from machinable.project import Project -from machinable.storage import Storage def test_collect(): assert isinstance(collect([1, 2]), Collection) -class DummyComponent(Component): +class Dummy(Component): class Config: m: int = -1 -def test_component_collection(tmp_storage): +def test_element_collection(tmp_storage): with Project("./tests/samples/project"): - collection = Component.collect( - [DummyComponent({"m": i % 2}) for i in range(5)] - ) + collection = Component.collect([Dummy({"m": i % 2}) for i in range(5)]) for i, e in enumerate(collection): e.save_file("i", i) assert isinstance(collection, ComponentCollection) collection.launch() - assert all(collection.map(lambda x: x.is_finished())) - - assert 
len(collection.status("finished")) == 5 - assert len(collection.status("active")) == 0 - assert len(collection.status("started")) == 5 - assert len(collection.status("incomplete")) == 0 - assert len(collection.status("started").status("active")) == 0 + assert all(collection.map(lambda x: x.execution.is_finished())) assert len(collection.filter_by_predicate("non-existent")) == 0 m = "tests.test_collection" @@ -45,6 +37,14 @@ def test_component_collection(tmp_storage): assert collection.singleton(m, {"m": 1}).load_file("i") == "1" + collection = Execution.collect([e.execution for e in collection]) + + assert len(collection.status("finished")) == 5 + assert len(collection.status("active")) == 0 + assert len(collection.status("started")) == 5 + assert len(collection.status("incomplete")) == 0 + assert len(collection.status("started").status("active")) == 0 + class CollectionTestCase(TestCase): def test_first_returns_first_item_in_collection(self): diff --git a/tests/test_component.py b/tests/test_component.py index 565e7e8a..2a0188ec 100644 --- a/tests/test_component.py +++ b/tests/test_component.py @@ -43,27 +43,6 @@ def test_component(tmp_storage): with pytest.raises(errors.MachinableError): component.version(["modify"]) - # output - c = Component().commit() - assert c.output() is None - c.save_file("output.log", "test") - assert c.output() == "test" - - assert c.output(incremental=True) == "test" - c.save_file("output.log", "testt") - assert c.output(incremental=True) == "t" - assert c.output(incremental=True) == "" - c.save_file("output.log", "testt more") - assert c.output(incremental=True) == " more" - - c.update_status("started") - assert c.is_started() - c.update_status("heartbeat") - assert c.is_active() - c.update_status("finished") - assert c.is_finished() - assert not c.is_incomplete() - p.__exit__() @@ -72,7 +51,7 @@ def test_component_launch(tmp_storage): assert not component.is_mounted() component.launch() assert component.is_mounted() - assert component.is_finished() + assert component.execution.is_finished() # multiples component = Component() @@ -80,15 +59,13 @@ def test_component_launch(tmp_storage): component.launch() component.launch() component.launch() - assert len(execution.executables) == 3 + assert len(execution.executables) == 1 with Execution(): e1 = Component().launch() - assert e1.execution is None e2 = Component().launch() - assert e2.execution is None - assert e1.is_finished() - assert e2.is_finished() + assert e1.execution.is_finished() + assert e2.execution.is_finished() assert e1.nickname != e2.nickname class Example(Component): @@ -99,9 +76,10 @@ def __call__(self): def test_component_relations(tmp_storage): - with Project("./tests/samples/project"): + with Project("./tests/samples/project") as project: component = Component.instance("basic") execution = Execution().add(component) + component.push_related("project", project) execution.dispatch() assert component.project.name() == "project" @@ -169,11 +147,11 @@ def test_component_export(tmp_storage): exec(script) e = Execution().add(component).commit() - assert not component.is_started() + assert not component.execution.is_started() exec(script) - assert component.is_finished() + assert component.execution.is_finished() assert component.load_file("test_run.json")["success"] # inline @@ -188,7 +166,7 @@ def test_component_export(tmp_storage): ["bash", script_filepath], capture_output=True, text=True, check=True ).stdout print(output) - assert component.is_finished() + assert component.execution.is_finished() 
assert component.load_file("test_run.json")["success"] class OuterContext(Execution): diff --git a/tests/test_element.py b/tests/test_element.py index 14d0e8ff..7dcf2659 100644 --- a/tests/test_element.py +++ b/tests/test_element.py @@ -378,6 +378,8 @@ def test_idversion(): def test_resolve_custom_predicate(): class T: + default_predicate = "default" + def __init__(self, predicate): self.predicate = predicate @@ -390,6 +392,15 @@ def on_compute_predicate(self): assert resolve_custom_predicate("test,*", T(None)) == ["test"] assert resolve_custom_predicate("* , t", T({})) == ["t"] assert resolve_custom_predicate("t,*", T({"a": "1"})) == ["t"] + assert resolve_custom_predicate("$", T(None)) == ["default"] + T.default_predicate = "default,*" + assert resolve_custom_predicate("$", T({"a*": 1, "b": 2})) == [ + "default", + "a", + ] + T.default_predicate = None + assert resolve_custom_predicate("$", T(None)) is None + assert resolve_custom_predicate(None, T({})) is None def test_uuid_to_id(): diff --git a/tests/test_execution.py b/tests/test_execution.py index 92b2d862..f4c1495d 100644 --- a/tests/test_execution.py +++ b/tests/test_execution.py @@ -1,29 +1,62 @@ import pytest -from machinable import Component, Execution, Project, errors +from machinable import Component, Execution, Project, errors, get def test_execution(tmp_storage): - assert len(Execution().add([Component(), Component()]).executables) == 2 + # no-predicate by default + e1 = get("machinable.execution", {"a": 1}).commit() + e2 = get("machinable.execution", {"a": 1}).commit() + assert e1 != e2 + execution = Execution() assert ( Execution.from_model(execution.__model__).timestamp == execution.timestamp ) - assert str(Execution()) == "Execution" - assert repr(Execution()) == "Execution" - - with Project("./tests/samples/project"): - execution = Execution().add(Component()) - assert len(execution.executables) == 1 - assert isinstance(execution.timestamp, float) + e = Execution() + assert str(e) == e.id + assert repr(e) == "Execution" - component = Component() - execution = Execution().add(component) - assert len(execution.executables) == 1 - execution.dispatch() + execution = Execution().add(Component()) + assert len(execution.executables) == 1 + assert isinstance(execution.timestamp, float) - assert component.host_info["python_version"].startswith("3") + # add + component = Component() + execution = Execution().add(component) + assert len(execution.executables) == 1 + execution.dispatch() + + restored = Execution.find(execution.uuid) + with pytest.raises(errors.MachinableError): + restored.add(Component()) + + # host info + assert execution.host_info["python_version"].startswith("3") + + # output + c = Component().commit() + e = Execution().add(c).commit() + assert e.output(c) is None + e.save_file([c, "output.log"], "test") + assert e.output(c) == "test" + + assert e.output(c, incremental=True) == "test" + e.save_file([c, "output.log"], "testt") + assert e.output(c, incremental=True) == "t" + assert e.output(c, incremental=True) == "" + e.save_file([c, "output.log"], "testt more") + assert e.output(c, incremental=True) == " more" + + # status + e.update_status(c, "started") + assert e.is_started(c) + e.update_status(c, "heartbeat") + assert e.is_active(c) + e.update_status(c, "finished") + assert e.is_finished(c) + assert not e.is_incomplete(c) def test_execution_dispatch(tmp_storage): @@ -62,23 +95,27 @@ def test_execution_context(tmp_storage): with Execution(schedule=None) as execution: e1 = Component() e1.launch() - assert 
e1.execution is None + assert e1.execution == execution + assert not e1.execution.is_started() e2 = Component() e2.launch() assert len(execution.executables) == 2 - assert e2.execution is None - assert e1.is_finished() - assert e2.is_finished() + assert e2.execution == execution + assert not e2.execution.is_started() + assert e1.execution.is_finished() + assert e2.execution.is_finished() - with Execution(): + with Execution() as execution: e1 = Component() e1.launch() e2 = Component() e2.launch() - assert e1.execution is None - assert e2.execution is None - assert e1.is_finished() - assert e2.is_finished() + assert e1.execution == execution + assert e2.execution == execution + assert not e1.execution.is_started() + assert not e2.execution.is_started() + assert e1.execution.is_finished() + assert e2.execution.is_finished() def test_execution_resources(tmp_storage): @@ -89,14 +126,14 @@ def test_execution_resources(tmp_storage): # default resources can be declared via a method class T(Execution): - def default_resources(self, component): + def on_compute_default_resources(self, _): return {"1": 2} execution = T() assert execution.compute_resources(component) == {"1": 2} # default resources are reused execution = T(resources={"test": "me"}) - assert execution.resources() == {"test": "me"} + assert execution.resources()["test"] == "me" assert execution.compute_resources(component) == {"1": 2, "test": "me"} # inheritance of default resources execution = T(resources={"3": 4}) @@ -108,44 +145,18 @@ def default_resources(self, component): assert execution.compute_resources(component) == {"3": 4} # interface - r = {"test": 1, "a": True} - with Execution(resources={}) as execution: + with Execution(resources={"test": 1, "a": True}) as execution: component = Component() - assert component.resources() is None - execution.resources(r) component.launch() - assert component.resources() == r + assert component.execution.resources()["test"] == 1 - with Execution(resources={}) as execution: - # component is already finished so updating resources has no effect - execution.resources({"a": 2}) + with Execution(resources={"a": 3}) as execution: component.launch() - assert component.resources()["a"] is True + assert component.execution.resources()["a"] is True e2 = Component() - execution.resources({"a": 3}) e2.launch() - assert e2.resources()["a"] == 3 - - # retried execution - g = {"fail": True} - - class Fail(Component): - def __call__(self) -> None: - if g["fail"]: - raise ValueError("Fail!") - - c = Fail() - with pytest.raises(errors.ExecutionFailed): - with Execution(resources={"x": 1}) as execution1: - c.launch() - assert c.resources()["x"] == 1 - g["fail"] = False - c.__related__ = {} - c._relation_cache = {} - with Execution(resources={"y": 1}) as execution2: - c.launch() - assert c.resources(execution2)["y"] == 1 + assert e2.execution.resources()["a"] == 3 def test_interrupted_execution(tmp_storage): @@ -156,8 +167,8 @@ def test_interrupted_execution(tmp_storage): except errors.ExecutionFailed: pass - assert component.is_started() - assert not component.is_finished() + assert component.execution.is_started() + assert not component.execution.is_finished() # resume try: @@ -166,4 +177,75 @@ def test_interrupted_execution(tmp_storage): pass component.launch() - assert component.is_finished() + assert component.execution.is_finished() + + +def test_rerepeated_execution(tmp_storage): + project = Project("./tests/samples/project").__enter__() + + # first execution + with Execution() as execution1: + c1 = 
get("count").launch() + assert c1.count == 0 + assert c1.execution == execution1 + assert c1.execution.is_finished() + assert c1.count == 1 + + # second execution, nothing happens here + with execution1: + c1.launch() + assert c1.execution == execution1 + assert c1.count == 1 + + # add a new component to existing execution is not allowed + with execution1: + c2 = get("count", predicate=None) + with pytest.raises(errors.MachinableError): + c2.launch() + assert c2.count == 0 + assert not c2.is_committed() + + # resume execution + with pytest.raises(errors.ExecutionFailed): + with Execution() as execution2: + done = get("count", predicate=None).launch() + failed = get("fail", predicate=None).launch() + assert done.execution.is_finished() + assert not done.execution.is_resumed() + assert not failed.execution.is_finished() + + failed.save_file("repaired", "yes") + with execution2: + done.launch() + failed.launch() + assert failed.execution.is_finished() + assert failed.execution.is_resumed() + assert len(execution2.executables) == 2 + + # resume with another execution + with pytest.raises(errors.ExecutionFailed): + with Execution() as execution2: + done = get("count", predicate=None).launch() + failed = get("fail", predicate=None).launch() + failed.save_file("repaired", "yes") + with Execution() as execution3: + failed.launch() + assert failed.execution == execution3 + assert failed.execution.is_finished() + assert not failed.execution.is_resumed() + assert len(execution2.executables) == 2 + assert len(execution3.executables) == 1 + + # attempted re-execution - silently ignored + with Execution() as execution4: + done.launch() + assert done.count == 1 + assert not execution4.is_committed() + with Execution() as execution5: + done.launch() + done2 = get("count", predicate=None).launch() + assert done.count == 1 + assert done2.count == 1 + assert len(execution5.executables) == 2 + + project.__exit__() diff --git a/tests/test_index.py b/tests/test_index.py index 8e7a88c9..c67c9a12 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -34,8 +34,8 @@ def test_index_load(tmp_path): db.close() -def test_index_commit(): - i = index.Index({"database": ":memory:"}) +def test_index_commit(tmp_path): + i = index.Index({"database": str(tmp_path / "index.sqlite")}) v = schema.Interface() e = (v.uuid, "Interface", None, "null", "[]", "null", "[]", v.timestamp) assert i.commit(v) is True @@ -47,8 +47,8 @@ def test_index_commit(): i.db.close() -def test_index_create_relation(setup=False): - i = index.Index({"database": ":memory:"}) +def test_index_create_relation(tmp_path, setup=False): + i = index.Index({"database": str(tmp_path / "index.sqlite")}) v1, v2, v3, v4 = ( schema.Interface(), schema.Interface(), @@ -72,8 +72,8 @@ def test_index_create_relation(setup=False): i.db.close() -def test_index_find(): - i = index.Index({"database": ":memory:"}) +def test_index_find(tmp_path): + i = index.Index({"database": str(tmp_path / "index.sqlite")}) v = schema.Interface() assert i.commit(v) is True assert i.find(v.uuid) == v @@ -81,8 +81,8 @@ def test_index_find(): i.db.close() -def test_index_find_by_predicate(): - i = index.Index({"database": ":memory:"}) +def test_index_find_by_predicate(tmp_path): + i = index.Index({"database": str(tmp_path / "index.sqlite")}) v = schema.Interface(module="machinable", predicate={"a": 0, "b": 0}) i.commit(v) assert len(i.find_by_predicate(module="machinable")) == 1 @@ -107,8 +107,8 @@ def test_index_find_by_predicate(): i.db.close() -def test_index_find_related(): - i, v1, 
v2, v3, v4 = test_index_create_relation(setup=True) +def test_index_find_related(tmp_path): + i, v1, v2, v3, v4 = test_index_create_relation(tmp_path, setup=True) q = i.find_related("test_one", v1.uuid) assert len(q) == 1 diff --git a/tests/test_interface.py b/tests/test_interface.py index e6adb422..da33e5ee 100644 --- a/tests/test_interface.py +++ b/tests/test_interface.py @@ -17,8 +17,7 @@ def test_interface_to_directory(tmp_path): assert os.path.exists(str(tmp_path / "test" / "model.json")) assert not os.path.exists(str(tmp_path / "test" / "related")) - i = Interface(derived_from=Interface()) - i.use([Interface(), Interface()]) + i = Interface(derived_from=Interface(), uses=[Interface(), Interface()]) i.to_directory(str(tmp_path / "test2")) assert load_file(str(tmp_path / "test2" / ".machinable")) == i.uuid assert load_file(str(tmp_path / "test2" / "related" / "uses")) == "\n".join( diff --git a/tests/test_project.py b/tests/test_project.py index 315be903..745d2ecf 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -33,6 +33,5 @@ def test_project_events(tmp_storage): component = Component.instance("dummy") component.launch() - assert component.host_info["dummy"] == "data" project.__exit__() diff --git a/tests/test_schedule.py b/tests/test_schedule.py index abefb6fe..6373335d 100644 --- a/tests/test_schedule.py +++ b/tests/test_schedule.py @@ -20,6 +20,6 @@ def test_schedule(tmp_storage): # execution supports schedule with Supported(schedule=["scheduled"]) as execution: component = Component().launch() - assert component.execution is None - assert component.is_finished() + assert component.execution == execution + assert component.execution.is_finished() assert execution.schedule.test() diff --git a/tests/test_utils.py b/tests/test_utils.py index a4e08a4a..8fe4308c 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -183,3 +183,12 @@ def test_directory_version(): "../test", ]: assert utils.is_directory_version(case) is True + + +def test_joinpath(): + assert utils.joinpath(["a", "b"]) == "a/b" + assert utils.joinpath(["a", "b", "c"]) == "a/b/c" + e = Element() + assert utils.joinpath([e, "b"]) == f"{e.id}/b" + assert utils.joinpath(["a", ""]) == "a/" + assert utils.joinpath([None, "a", None, "b"]) == "a/b"
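
For reference, a minimal usage sketch of the per-executable execution data introduced above; the project path, the `estimate_pi` module name and the resource key are placeholders and not part of this patch:

```python
from machinable import Component, Execution, Project

with Project("./my-project"):  # placeholder project directory
    component = Component.instance("estimate_pi")  # placeholder component module

    with Execution(resources={"processes": 2}) as execution:  # resources are free-form
        component.launch()

    # status, output and resources are tracked per executable on the execution
    assert execution.is_finished(component)
    assert component.execution.is_finished()  # equivalent access via the component
    print(execution.output(component))     # reads <execution-dir>/<component.id>/output.log
    print(execution.resources(component))  # includes the execution default under '_default_'

    # execution files can be scoped to an executable by passing a path list
    execution.save_file([component, "notes.json"], {"seed": execution.seed})
    assert execution.load_file([component, "notes.json"])["seed"] == execution.seed
```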