From a5ddd949e582f7a92d50754b8645ae2fa4f5af22 Mon Sep 17 00:00:00 2001 From: Jakob Gerhard Martinussen <10655778+JakobGM@users.noreply.github.com> Date: Thu, 22 Jun 2023 01:18:51 +0200 Subject: [PATCH 01/29] Downgrade Sphinx to compatible version --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 3b120d2..7045916 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2290,4 +2290,4 @@ pandas = ["pandas"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "65cee0051a8479939b886004fcc59ec87c7a7a6d396a4c55890fe63457557eeb" +content-hash = "19108e18ea8fd47831548215821bb4d5dbe48ea4c696922e6e1def7a5a164009" From 22871886571b5558be0390c4046ebd50c04f8298 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Thu, 26 Oct 2023 12:42:39 -0400 Subject: [PATCH 02/29] very rough draft for pydantic v2 --- poetry.lock | 218 +++++++++++++++++++++++------ pyproject.toml | 4 +- src/patito/exceptions.py | 86 +++++++++++- src/patito/polars.py | 4 +- src/patito/pydantic.py | 18 ++- src/patito/validators.py | 1 + tests/test_duckdb/test_database.py | 2 +- tests/test_dummy_data.py | 10 +- tests/test_polars.py | 20 +-- tests/test_validators.py | 24 ++-- 10 files changed, 301 insertions(+), 86 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7045916..718ca7e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,6 +11,20 @@ files = [ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] +[[package]] +name = "annotated-types" +version = "0.5.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.7" +files = [ + {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, + {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, +] + 
+[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + [[package]] name = "apeye" version = "1.4.0" @@ -1169,6 +1183,43 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = 
"numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + [[package]] name = "packaging" version = "23.1" @@ -1395,55 +1446,135 @@ files = [ [[package]] name = "pydantic" -version = "1.10.11" -description = "Data validation and settings management using python type hints" +version = "2.0.3" +description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, - {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, - {file = 
"pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, - {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, - {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, - {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, - {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, - {file = 
"pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, - {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, - {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, - {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, + {file = "pydantic-2.0.3-py3-none-any.whl", hash = "sha256:614eb3321eb600c81899a88fa9858b008e3c79e0d4f1b49ab1f516b4b0c27cfb"}, + {file = "pydantic-2.0.3.tar.gz", hash = "sha256:94f13e0dcf139a5125e88283fc999788d894e14ed90cf478bcc2ee50bd4fc630"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.3.0" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.3.0" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.3.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:4542c98b8364b976593703a2dda97377433b102f380b61bc3a2cbc2fbdae1d1f"}, + {file = "pydantic_core-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9342de50824b40f55d2600f66c6f9a91a3a24851eca39145a749a3dc804ee599"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:539432f911686cb80284c30b33eaf9f4fd9a11e1111fe0dc98fdbdce69b49821"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38a0e7ee65c8999394d92d9c724434cb629279d19844f2b69d9bbc46dc8b8b61"}, + {file = 
"pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_armv7l.whl", hash = "sha256:e3ed6834cc005798187a56c248a2240207cb8ffdda1c89e9afda4c3d526c2ea0"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:e72ac299a6bf732a60852d052acf3999d234686755a02ba111e85e7ebf8155b1"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_s390x.whl", hash = "sha256:616b3451b05ca63b8f433c627f68046b39543faeaa4e50d8c6699a2a1e4b85a5"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:adcb9c8848e15c613e483e0b99767ae325af27fe0dbd866df01fe5849d06e6e1"}, + {file = "pydantic_core-2.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:464bf799b422be662e5e562e62beeffc9eaa907d381a9d63a2556615bbda286d"}, + {file = "pydantic_core-2.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4638ebc17de08c2f3acba557efeb6f195c88b7299d8c55c0bb4e20638bbd4d03"}, + {file = "pydantic_core-2.3.0-cp310-none-win32.whl", hash = "sha256:9ff322c7e1030543d35d83bb521b69114d3d150750528d7757544f639def9ad6"}, + {file = "pydantic_core-2.3.0-cp310-none-win_amd64.whl", hash = "sha256:4824eb018f0a4680b1e434697a9bf3f41c7799b80076d06530cbbd212e040ccc"}, + {file = "pydantic_core-2.3.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:0aa429578e23885b3984c49d687cd05ab06f0b908ea1711a8bf7e503b7f97160"}, + {file = "pydantic_core-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20d710c1f79af930b8891bcebd84096798e4387ab64023ef41521d58f21277d3"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:309f45d4d7481d6f09cb9e35c72caa0e50add4a30bb08c04c5fe5956a0158633"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bcfb7be905aa849bd882262e1df3f75b564e2f708b4b4c7ad2d3deaf5410562"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_armv7l.whl", hash = 
"sha256:85cd9c0af34e371390e3cb2f3a470b0b40cc07568c1e966c638c49062be6352d"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_ppc64le.whl", hash = "sha256:37c5028cebdf731298724070838fb3a71ef1fbd201d193d311ac2cbdbca25a23"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_s390x.whl", hash = "sha256:e4208f23f12d0ad206a07a489ef4cb15722c10b62774c4460ee4123250be938e"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c24465dd11b65c8510f251b095fc788c7c91481c81840112fe3f76c30793a455"}, + {file = "pydantic_core-2.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3cd7ee8bbfab277ab56e272221886fd33a1b5943fbf45ae9195aa6a48715a8a0"}, + {file = "pydantic_core-2.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0fc7e0b056b66cc536e97ef60f48b3b289f6b3b62ac225afd4b22a42434617bf"}, + {file = "pydantic_core-2.3.0-cp311-none-win32.whl", hash = "sha256:4788135db4bd83a5edc3522b11544b013be7d25b74b155e08dd3b20cd6663bbb"}, + {file = "pydantic_core-2.3.0-cp311-none-win_amd64.whl", hash = "sha256:f93c867e5e85584a28c6a6feb6f2086d717266eb5d1210d096dd717b7f4dec04"}, + {file = "pydantic_core-2.3.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:73f62bb7fd862d9bcd886e10612bade6fe042eda8b47e8c129892bcfb7b45e84"}, + {file = "pydantic_core-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d889d498fce64bfcd8adf1a78579a7f626f825cbeb2956a24a29b35f9a1df32"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d55e38a89ec2ae17b2fa7ffeda6b70f63afab1888bd0d57aaa7b7879760acb4"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1aefebb506bc1fe355d91d25f12bcdea7f4d7c2d9f0f6716dd025543777c99a5"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_armv7l.whl", hash = "sha256:6441a29f42585f085db0c04cd0557d4cbbb46fa68a0972409b1cfe9f430280c1"}, + {file = 
"pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_ppc64le.whl", hash = "sha256:47e8f034be31390a8f525431eb5e803a78ce7e2e11b32abf5361a972e14e6b61"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_s390x.whl", hash = "sha256:ad814864aba263be9c83ada44a95f72d10caabbf91589321f95c29c902bdcff0"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9eff3837d447fccf2ac38c259b14ab9cbde700df355a45a1f3ff244d5e78f8b6"}, + {file = "pydantic_core-2.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:534f3f63c000f08050c6f7f4378bf2b52d7ba9214e9d35e3f60f7ad24a4d6425"}, + {file = "pydantic_core-2.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ef6a222d54f742c24f6b143aab088702db3a827b224e75b9dd28b38597c595fe"}, + {file = "pydantic_core-2.3.0-cp312-none-win32.whl", hash = "sha256:4e26944e64ecc1d7b19db954c0f7b471f3b141ec8e1a9f57cfe27671525cd248"}, + {file = "pydantic_core-2.3.0-cp312-none-win_amd64.whl", hash = "sha256:019c5c41941438570dfc7d3f0ae389b2425add1775a357ce1e83ed1434f943d6"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:27c1bbfb9d84a75cf33b7f19b53c29eb7ead99b235fce52aced5507174ab8f98"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:7cb496e934b71f1ade844ab91d6ccac78a3520e5df02fdb2357f85a71e541e69"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af2d43b1978958d91351afbcc9b4d0cfe144c46c61740e82aaac8bb39ab1a4d"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3097c39d7d4e8dba2ef86de171dcccad876c36d8379415ba18a5a4d0533510"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_armv7l.whl", hash = "sha256:dd3b023f3317dbbbc775e43651ce1a31a9cea46216ad0b5be37afc18a2007699"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = 
"sha256:27babb9879bf2c45ed655d02639f4c30e2b9ef1b71ce59c2305bbf7287910a18"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_s390x.whl", hash = "sha256:2183a9e18cdc0de53bdaa1675f237259162abeb62d6ac9e527c359c1074dc55d"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c089d8e7f1b4db08b2f8e4107304eec338df046275dad432635a9be9531e2fc8"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f10aa5452b865818dd0137f568d443f5e93b60a27080a01aa4b7512c7ba13a3"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f642313d559f9d9a00c4de6820124059cc3342a0d0127b18301de2c680d5ea40"}, + {file = "pydantic_core-2.3.0-cp37-none-win32.whl", hash = "sha256:45327fc57afbe3f2c3d7f54a335d5cecee8a9fdb3906a2fbed8af4092f4926df"}, + {file = "pydantic_core-2.3.0-cp37-none-win_amd64.whl", hash = "sha256:e427b66596a6441a5607dfc0085b47d36073f88da7ac48afd284263b9b99e6ce"}, + {file = "pydantic_core-2.3.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:0b3d781c71b8bfb621ef23b9c874933e2cd33237c1a65cc20eeb37437f8e7e18"}, + {file = "pydantic_core-2.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad46027dbd5c1db87dc0b49becbe23093b143a20302028d387dae37ee5ef95f5"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39aa09ed7ce2a648c904f79032d16dda29e6913112af8465a7bf710eef23c7ca"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b4bf8c58409586a7a04c858a86ab10f28c6c1a7c33da65e0326c59d5b0ab16"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_armv7l.whl", hash = "sha256:ba2b807d2b62c446120906b8580cddae1d76d3de4efbb95ccc87f5e35c75b4b2"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:ea955e4ed21f4bbb9b83fea09fc6af0bed82e69ecf6b35ec89237a0a49633033"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_s390x.whl", hash = 
"sha256:06884c07956526ac9ebfef40fe21a11605569b8fc0e2054a375fb39c978bf48f"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f868e731a18b403b88aa434d960489ceeed0ddeb44ebc02389540731a67705e0"}, + {file = "pydantic_core-2.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cb08fab0fc1db15c277b72e33ac74ad9c0c789413da8984a3eacb22a94b42ef4"}, + {file = "pydantic_core-2.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6ca34c29fbd6592de5fd39e80c1993634d704c4e7e14ba54c87b2c7c53da68fe"}, + {file = "pydantic_core-2.3.0-cp38-none-win32.whl", hash = "sha256:cd782807d35c8a41aaa7d30b5107784420eefd9fdc1c760d86007d43ae00b15d"}, + {file = "pydantic_core-2.3.0-cp38-none-win_amd64.whl", hash = "sha256:01f56d5ee70b1d39c0fd08372cc5142274070ab7181d17c86035f130eebc05b8"}, + {file = "pydantic_core-2.3.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:78b1ac0151271ce62bc2b33755f1043eda6a310373143a2f27e2bcd3d5fc8633"}, + {file = "pydantic_core-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:64bfd2c35a2c350f73ac52dc134d8775f93359c4c969280a6fe5301b5b6e7431"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:937c0fe9538f1212b62df6a68f8d78df3572fe3682d9a0dd8851eac8a4e46063"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d965c7c4b40d1cedec9188782e98bd576f9a04868835604200c3a6e817b824f"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_armv7l.whl", hash = "sha256:ad442b8585ed4a3c2d22e4bf7b465d9b7d281e055b09719a8aeb5b576422dc9b"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:4bf20c9722821fce766e685718e739deeccc60d6bc7be5029281db41f999ee0c"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_s390x.whl", hash = "sha256:f3dd5333049b5b3faa739e0f40b77cc8b7a1aded2f2da0e28794c81586d7b08a"}, + {file = 
"pydantic_core-2.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dc5f516b24d24bc9e8dd9305460899f38302b3c4f9752663b396ef9848557bf"}, + {file = "pydantic_core-2.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:055f7ea6b1fbb37880d66d70eefd22dd319b09c79d2cb99b1dbfeb34b653b0b2"}, + {file = "pydantic_core-2.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:af693a89db6d6ac97dd84dd7769b3f2bd9007b578127d0e7dda03053f4d3b34b"}, + {file = "pydantic_core-2.3.0-cp39-none-win32.whl", hash = "sha256:f60e31e3e15e8c294bf70c60f8ae4d0c3caf3af8f26466e9aa8ea4c01302749b"}, + {file = "pydantic_core-2.3.0-cp39-none-win_amd64.whl", hash = "sha256:2b79f3681481f4424d7845cc7a261d5a4baa810d656b631fa844dc9967b36a7b"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:a666134b41712e30a71afaa26deeb4da374179f769fa49784cdf0e7698880fab"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c119e9227487ad3d7c3c737d896afe548a6be554091f9745da1f4b489c40561"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73929a2fb600a2333fce2efd92596cff5e6bf8946e20e93c067b220760064862"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:41bbc2678a5b6a19371b2cb51f30ccea71f0c14b26477d2d884fed761cea42c7"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dcbff997f47d45bf028bda4c3036bb3101e89a3df271281d392b6175f71c71d1"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:afa8808159169368b66e4fbeafac6c6fd8f26246dc4d0dcc2caf94bd9cf1b828"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12be3b5f54f8111ca38e6b7277f26c23ba5cb3344fae06f879a0a93dfc8b479e"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:ed5babdcd3d052ba5cf8832561f18df20778c7ccf12587b2d82f7bf3bf259a0e"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d642e5c029e2acfacf6aa0a7a3e822086b3b777c70d364742561f9ca64c1ffc"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba3073eb38a1294e8c7902989fb80a7a147a69db2396818722bd078476586a0"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5146a6749b1905e04e62e0ad4622f079e5582f8b3abef5fb64516c623127908"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:deeb64335f489c3c11949cbd1d1668b3f1fb2d1c6a5bf40e126ef7bf95f9fa40"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:31acc37288b8e69e4849f618c3d5cf13b58077c1a1ff9ade0b3065ba974cd385"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e09d9f6d722de9d4c1c5f122ea9bc6b25a05f975457805af4dcab7b0128aacbf"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ba6a8cf089222a171b8f84e6ec2d10f7a9d14f26be3a347b14775a8741810676"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1fd1b24e9bcddcb168437686677104e205c8e25b066e73ffdf331d3bb8792b"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eda1a89c4526826c0a87d33596a4cd15b8f58e9250f503e39af1699ba9c878e8"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3e9a18401a28db4358da2e191508702dbf065f2664c710708cdf9552b9fa50c"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a439fd0d45d51245bbde799726adda5bd18aed3fa2b01ab2e6a64d6d13776fa3"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", 
hash = "sha256:bf6a1d2c920cc9528e884850a4b2ee7629e3d362d5c44c66526d4097bbb07a1a"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e33fcbea3b63a339dd94de0fc442fefacfe681cc7027ce63f67af9f7ceec7422"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:bf3ed993bdf4754909f175ff348cf8f78d4451215b8aa338633f149ca3b1f37a"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7584171eb3115acd4aba699bc836634783f5bd5aab131e88d8eeb8a3328a4a72"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1624baa76d1740711b2048f302ae9a6d73d277c55a8c3e88b53b773ebf73a971"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:06f33f695527f5a86e090f208978f9fd252c9cfc7e869d3b679bd71f7cb2c1fa"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7ecf0a67b212900e92f328181fed02840d74ed39553cdb38d27314e2b9c89dfa"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:45fa1e8ad6f4367ad73674ca560da8e827cc890eaf371f3ee063d6d7366a207b"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8d0dbcc57839831ae79fd24b1b83d42bc9448d79feaf3ed3fb5cbf94ffbf3eb7"}, + {file = "pydantic_core-2.3.0.tar.gz", hash = "sha256:5cfb5ac4e82c47d5dc25b209dd4c3989e284b80109f9e08b33c895080c424b4f"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pyflakes" @@ -1689,7 +1820,8 @@ files = [ {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, {file = 
"ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, @@ -2290,4 +2422,4 @@ pandas = ["pandas"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "19108e18ea8fd47831548215821bb4d5dbe48ea4c696922e6e1def7a5a164009" +content-hash = "1c56ba4697b6ba940b943a89b3da11a6723caddf58f4cd828a19dd6e3e3b48cf" diff --git a/pyproject.toml b/pyproject.toml index 4eef66b..c917137 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,8 +12,8 @@ keywords = ["validation", "dataframe"] [tool.poetry.dependencies] python = "^3.8" -pydantic = "^1.7.0" -polars = ">=0.18.7" +pydantic = ">=2.0.0" +polars = ">=0.18.3" # Required for typing.Literal in python3.7 typing-extensions = "*" pandas = {version = "*", optional = true, python = "^3.8"} diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index 354db0f..b137512 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -1,15 +1,87 @@ """Module containing all custom 
exceptions raised by patito.""" -import pydantic - - -class ValidationError(pydantic.ValidationError): - """Exception raised when dataframe does not match schema.""" +from typing import Any, Callable, Generator, Iterable, Optional, Sequence, Tuple, Union +from pydantic import ValidationError as ValidationError + +Loc = Tuple[Union[int, str], ...] +ReprArgs = Sequence[Tuple[Optional[str], Any]] +RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] +class Representation: + """ + Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. + + __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations + of objects. + """ + + __slots__: Tuple[str, ...] = tuple() + + def __repr_args__(self) -> 'ReprArgs': + """ + Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. + + Can either return: + * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` + * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` + """ + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v is not None] + + def __repr_name__(self) -> str: + """ + Name of the instance's class, used in __repr__. 
+ """ + return self.__class__.__name__ + + def __repr_str__(self, join_str: str) -> str: + return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__()) + + def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]: + """ + Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects + """ + yield self.__repr_name__() + '(' + yield 1 + for name, value in self.__repr_args__(): + if name is not None: + yield name + '=' + yield fmt(value) + yield ',' + yield 0 + yield -1 + yield ')' + + def __str__(self) -> str: + return self.__repr_str__(' ') + + def __repr__(self) -> str: + return f'{self.__repr_name__()}({self.__repr_str__(", ")})' + + def __rich_repr__(self) -> 'RichReprResult': + """Get fields for Rich library""" + for name, field_repr in self.__repr_args__(): + if name is None: + yield field_repr + else: + yield name, field_repr + +class ErrorWrapper(Representation): + """Wrapper for specific column validation error.""" + __slots__ = 'exc', '_loc' + def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: + self.exc = exc + self._loc = loc -class ErrorWrapper(pydantic.error_wrappers.ErrorWrapper): - """Wrapper for specific column validation error.""" + def loc_tuple(self) -> 'Loc': + if isinstance(self._loc, tuple): + return self._loc + else: + return (self._loc,) + def __repr_args__(self) -> 'ReprArgs': + return [('exc', self.exc), ('loc', self.loc_tuple())] + class WrongColumnsError(TypeError): """Validation exception for column name mismatches.""" diff --git a/src/patito/polars.py b/src/patito/polars.py index 7125807..fe5bfe9 100644 --- a/src/patito/polars.py +++ b/src/patito/polars.py @@ -60,7 +60,7 @@ def _construct_lazyframe_model_class( return cls new_class = type( - f"{model.schema()['title']}LazyFrame", + f"{model.model_json_schema()['title']}LazyFrame", (cls,), {"model": model}, ) @@ -150,7 +150,7 @@ def 
_construct_dataframe_model_class( "hard-coded" to the given model. """ new_class = type( - f"{model.schema()['title']}DataFrame", + f"{model.model_json_schema()['title']}DataFrame", (cls,), {"model": model}, ) diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 2a5cfb1..44a7970 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -20,8 +20,8 @@ import polars as pl from polars.datatypes import PolarsDataType -from pydantic import BaseConfig, BaseModel, Field, create_model # noqa: F401 -from pydantic.main import ModelMetaclass as PydanticModelMetaclass +from pydantic import ConfigDict, BaseModel, Field, create_model # noqa: F401 +from pydantic._internal._model_construction import ModelMetaclass as PydanticModelMetaclass from typing_extensions import Literal, get_args from patito.polars import DataFrame, LazyFrame @@ -107,7 +107,7 @@ def columns(cls: Type[ModelType]) -> List[str]: # type: ignore >>> Product.columns ['name', 'price'] """ - return list(cls.schema()["properties"].keys()) + return list(cls.model_json_schema()["properties"].keys()) @property def dtypes( # type: ignore @@ -460,7 +460,7 @@ def non_nullable_columns( # type: ignore >>> sorted(MyModel.non_nullable_columns) ['another_non_nullable_field', 'non_nullable_field'] """ - return set(cls.schema().get("required", {})) + return set(cls.model_json_schema().get("required", {})) @property def nullable_columns( # type: ignore @@ -1087,6 +1087,10 @@ def join( (cls, {"outer"}), (other, {"left", "outer", "asof"}), ): + # TODO PYDANTIC V2, not sure how to implement this: + # old_field.required no longer exists, maybe this needs to be + # completely rewritten. 
See fields at + # https://docs.pydantic.dev/latest/api/fields/#pydantic.fields.FieldInfo for field_name, field in model.__fields__.items(): field_type = field.type_ field_default = field.default @@ -1332,7 +1336,7 @@ def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: TypeError: if a field is annotated with an enum where the values are of different types. """ - schema = cls.schema(ref_template="{model}") + schema = cls.model_json_schema(ref_template="{model}") required = schema.get("required", set()) fields = {} for field_name, field_info in schema["properties"].items(): @@ -1385,6 +1389,10 @@ def _derive_model( """ new_fields = {} for new_field_name, field_definition in field_mapping.items(): + # TODO PYDANTIC V2, not sure how to implement this: + # old_field.required no longer exists, maybe this needs to be + # completely rewritten. See fields at + # https://docs.pydantic.dev/latest/api/fields/#pydantic.fields.FieldInfo if isinstance(field_definition, str): # A single string, interpreted as the name of a field on the existing # model. 
diff --git a/src/patito/validators.py b/src/patito/validators.py index ff0e2fa..776378e 100644 --- a/src/patito/validators.py +++ b/src/patito/validators.py @@ -316,3 +316,4 @@ def validate( errors = _find_errors(dataframe=polars_dataframe, schema=schema) if errors: raise ValidationError(errors=errors, model=schema) + diff --git a/tests/test_duckdb/test_database.py b/tests/test_duckdb/test_database.py index 955f620..dd4962a 100644 --- a/tests/test_duckdb/test_database.py +++ b/tests/test_duckdb/test_database.py @@ -117,7 +117,7 @@ class Model(BaseModel): "BOOLEAN", "BOOLEAN", pt.duckdb._enum_type_name( # pyright: ignore - field_properties=Model.schema()["properties"]["enum_column"] + field_properties=Model.model_json_schema()["properties"]["enum_column"] ), ] diff --git a/tests/test_dummy_data.py b/tests/test_dummy_data.py index ace5765..0a3fe6d 100644 --- a/tests/test_dummy_data.py +++ b/tests/test_dummy_data.py @@ -86,11 +86,11 @@ def test_generation_of_unique_data(): class UniqueModel(pt.Model): bool_column: bool - string_column: str = pt.Field(unique=True) - int_column: int = pt.Field(unique=True) - float_column: int = pt.Field(unique=True) - date_column: date = pt.Field(unique=True) - datetime_column: datetime = pt.Field(unique=True) + string_column: str = pt.Field(json_schema_extra={"unique":True}) + int_column: int = pt.Field(json_schema_extra={"unique":True}) + float_column: int = pt.Field(json_schema_extra={"unique":True}) + date_column: date = pt.Field(json_schema_extra={"unique":True}) + datetime_column: datetime = pt.Field(json_schema_extra={"unique":True}) example_df = UniqueModel.examples({"bool_column": [True, False]}) for column in UniqueModel.columns: diff --git a/tests/test_polars.py b/tests/test_polars.py index c288c0e..bbfb762 100644 --- a/tests/test_polars.py +++ b/tests/test_polars.py @@ -12,7 +12,7 @@ def test_dataframe_get_method(): """You should be able to retrieve a single row and cast to model.""" class Product(pt.Model): - product_id: 
int = pt.Field(unique=True) + product_id: int = pt.Field(json_schema_extra={"unique":True}) price: float df = pt.DataFrame({"product_id": [1, 2], "price": [9.99, 19.99]}) @@ -112,7 +112,7 @@ def test_dataframe_model_dtype_casting(): class DTypeModel(pt.Model): implicit_int_1: int implicit_int_2: int - explicit_uint: int = pt.Field(dtype=pl.UInt64) + explicit_uint: int = pt.Field(json_schema_extra={"dtype":pl.UInt64}) implicit_date: date implicit_datetime: datetime @@ -159,8 +159,8 @@ def test_correct_columns_and_dtype_on_read(tmp_path): """A model DataFrame should aid CSV reading with column names and dtypes.""" class Foo(pt.Model): - a: str = pt.Field(derived_from="column_1") - b: int = pt.Field(derived_from="column_2") + a: str = pt.Field(json_schema_extra={"derived_from":"column_1"}) + b: int = pt.Field(json_schema_extra={"derived_from":"column_2"}) csv_path = tmp_path / "foo.csv" csv_path.write_text("1,2") @@ -189,7 +189,7 @@ class Foo(pt.Model): assert unspecified_column_df.dtypes == [pl.Utf8, pl.Int64, pl.Float64] class DerivedModel(pt.Model): - cents: int = pt.Field(derived_from=100 * pl.col("dollars")) + cents: int = pt.Field(json_schema_extra={"derived_from":100 * pl.col("dollars")}) csv_path.write_text("month,dollars\n1,2.99") derived_df = DerivedModel.DataFrame.read_csv(csv_path) @@ -203,10 +203,10 @@ def test_derive_functionality(): class DerivedModel(pt.Model): underived: int - const_derived: int = pt.Field(derived_from=pl.lit(3)) - column_derived: int = pt.Field(derived_from="underived") - expr_derived: int = pt.Field(derived_from=2 * pl.col("underived")) - second_order_derived: int = pt.Field(derived_from=2 * pl.col("expr_derived")) + const_derived: int = pt.Field(json_schema_extra={"derived_from":pl.lit(3)}) + column_derived: int = pt.Field(json_schema_extra={"derived_from":"underived"}) + expr_derived: int = pt.Field(json_schema_extra={"derived_from":2 * pl.col("underived")}) + second_order_derived: int = 
pt.Field(json_schema_extra={"derived_from":2 * pl.col("expr_derived")}) df = DerivedModel.DataFrame({"underived": [1, 2]}) assert df.columns == ["underived"] @@ -224,7 +224,7 @@ class DerivedModel(pt.Model): # Non-compatible derive_from arguments should raise TypeError class InvalidModel(pt.Model): - incompatible: int = pt.Field(derived_from=object) + incompatible: int = pt.Field(json_schema_extra={"derived_from":object}) with pytest.raises( TypeError, diff --git a/tests/test_validators.py b/tests/test_validators.py index 793f02a..8293e42 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -236,7 +236,7 @@ def test_uniqueness_validation(): """It should be able to validate uniqueness.""" class MyModel(pt.Model): - column: int = pt.Field(unique=True) + column: int = pt.Field(json_schema_extra={"unique":True}) non_duplicated_df = pt.DataFrame({"column": [1, 2, 3]}) MyModel.validate(non_duplicated_df) @@ -347,7 +347,7 @@ def test_uniqueness_constraint_validation(): """Uniqueness constraints should be validated.""" class UniqueModel(pt.Model): - product_id: int = pt.Field(unique=True) + product_id: int = pt.Field(json_schema_extra={"unique":True}) validate(dataframe=pl.DataFrame({"product_id": [1, 2]}), schema=UniqueModel) @@ -373,7 +373,9 @@ class BoundModel(pt.Model): gt_column: float = pt.Field(gt=42.5) combined_column: float = pt.Field(gt=42.5, le=43) multiple_column: float = pt.Field(multiple_of=0.5) - const_column: float = pt.Field(default=3.1415, const=True) + # const fields should now use Literal instead, but pyright + # complains about Literal of float values + const_column: Literal[3.1415] = pt.Field(default=3.1415) #type: ignore regex_column: str = pt.Field(regex=r"value [A-Z]") min_length_column: str = pt.Field(min_length=2) max_length_column: str = pt.Field(max_length=2) @@ -412,10 +414,10 @@ def test_validation_of_dtype_specifiers(): class DTypeModel(pt.Model): int_column: int - int_explicit_dtype_column: int = pt.Field(dtype=pl.Int64) 
- smallint_column: int = pt.Field(dtype=pl.Int8) - unsigned_int_column: int = pt.Field(dtype=pl.UInt64) - unsigned_smallint_column: int = pt.Field(dtype=pl.UInt8) + int_explicit_dtype_column: int = pt.Field(json_schema_extra={"dtype":pl.Int64}) + smallint_column: int = pt.Field(json_schema_extra={"dtype":pl.Int8}) + unsigned_int_column: int = pt.Field(json_schema_extra={"dtype":pl.UInt64}) + unsigned_smallint_column: int = pt.Field(json_schema_extra={"dtype":pl.UInt8}) assert DTypeModel.dtypes == { "int_column": pl.Int64, @@ -475,7 +477,7 @@ class CustomConstraintModel(pt.Model): even_int: int = pt.Field( constraints=[(pl.col("even_int") % 2 == 0).alias("even_constraint")] ) - odd_int: int = pt.Field(constraints=pl.col("odd_int") % 2 == 1) + odd_int: int = pt.Field(json_schema_extra={"constraints":pl.col("odd_int") % 2 == 1}) df = CustomConstraintModel.DataFrame({"even_int": [2, 3], "odd_int": [1, 2]}) with pytest.raises(ValidationError) as e_info: @@ -496,7 +498,7 @@ class CustomConstraintModel(pt.Model): # We can also validate aggregation queries class PizzaSlice(pt.Model): - fraction: float = pt.Field(constraints=pl.col("fraction").sum() == 1) + fraction: float = pt.Field(json_schema_extra={"constraints":pl.col("fraction").sum() == 1}) whole_pizza = pt.DataFrame({"fraction": [0.25, 0.75]}) PizzaSlice.validate(whole_pizza) @@ -511,9 +513,9 @@ def test_anonymous_column_constraints(): class Pair(pt.Model): # pl.col("_") refers to the given field column - odd_number: int = pt.Field(constraints=pl.col("_") % 2 == 1) + odd_number: int = pt.Field(json_schema_extra={"constraints":pl.col("_") % 2 == 1}) # pt.field is simply an alias for pl.col("_") - even_number: int = pt.Field(constraints=pt.field % 2 == 0) + even_number: int = pt.Field(json_schema_extra={"constraints":pt.field % 2 == 0}) pairs = pt.DataFrame({"odd_number": [1, 3, 5], "even_number": [2, 4, 6]}) Pair.validate(pairs) From 42299dbac416870bb618099591c56c5a41eee724 Mon Sep 17 00:00:00 2001 From: Brendan 
Cooley Date: Thu, 26 Oct 2023 12:44:07 -0400 Subject: [PATCH 03/29] fix: `DataFrameValidationError` mirrors pydantic v1 `ValidationError` test_validators passes with exception of tests containing `json_schema_extra` --- poetry.lock | 753 ++++++++++++++++++++------------------- pyproject.toml | 2 +- src/patito/__init__.py | 4 +- src/patito/exceptions.py | 146 +++++++- src/patito/pydantic.py | 53 +-- src/patito/validators.py | 12 +- tests/test_model.py | 3 +- tests/test_validators.py | 54 +-- 8 files changed, 602 insertions(+), 425 deletions(-) diff --git a/poetry.lock b/poetry.lock index 718ca7e..f1d4954 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,13 +13,13 @@ files = [ [[package]] name = "annotated-types" -version = "0.5.0" +version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, - {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] [package.dependencies] @@ -95,17 +95,18 @@ Sphinx = ">=2.2,<8.0" [[package]] name = "babel" -version = "2.12.1" +version = "2.13.1" description = "Internationalization utilities" optional = true python-versions = ">=3.7" files = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, + {file = "Babel-2.13.1-py3-none-any.whl", hash = 
"sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, + {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} +setuptools = {version = "*", markers = "python_version >= \"3.12\""} [[package]] name = "bandit" @@ -150,33 +151,29 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.7.0" +version = "23.10.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = 
"sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, + {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, + {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, + {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, + {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, + {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, + {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, + {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, + {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, + {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, + {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, ] [package.dependencies] @@ -246,97 +243,112 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = 
"charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = 
"charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"}, + {file = 
"charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"}, + {file = 
"charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"}, + {file = 
"charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"}, + {file = 
"charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"}, + {file = 
"charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"}, + {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, ] [[package]] name = "click" -version = "8.1.5" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"}, - {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -486,13 +498,13 @@ files = [ [[package]] name = "domdf-python-tools" -version = "3.6.1" +version = "3.7.0" description = "Helpful functions for Python 🐍 🛠️" optional = true python-versions = ">=3.6" files = [ - {file = "domdf_python_tools-3.6.1-py3-none-any.whl", hash = "sha256:e18158460850957f18e740eb94ede56f580ddb0cb162ab9d9834ed8bbb1b6431"}, - {file = "domdf_python_tools-3.6.1.tar.gz", hash = "sha256:acc04563d23bce4d437dd08af6b9bea788328c412772a044d8ca428a7ad861be"}, + {file = "domdf_python_tools-3.7.0-py3-none-any.whl", hash = "sha256:7b4d1c3bdb7402b872d43953824bf921ae2e52f893adbe5c0052a21a6efa2fe4"}, + {file = "domdf_python_tools-3.7.0.tar.gz", 
hash = "sha256:df1af9a91649af0fb2a4e7b3a4b0a0936e4f78389dd7280dd6fd2f53a339ca71"}, ] [package.dependencies] @@ -567,13 +579,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -713,13 +725,13 @@ flake8 = "*" [[package]] name = "gitdb" -version = "4.0.10" +version = "4.0.11" description = "Git Object Database" optional = false python-versions = ">=3.7" files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, ] [package.dependencies] @@ -727,18 +739,21 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.32" +version = "3.1.40" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"}, - {file = "GitPython-3.1.32.tar.gz", hash = 
"sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"}, + {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, + {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] + [[package]] name = "html5lib" version = "1.1" @@ -1062,37 +1077,38 @@ files = [ [[package]] name = "mypy" -version = "1.4.1" +version = "1.6.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = 
"mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = 
"mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, ] [package.dependencies] @@ -1148,39 +1164,39 @@ setuptools = "*" [[package]] name = "numpy" -version = "1.24.4" +version = "1.24.3" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.8" files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = 
"numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = 
"numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, + {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, + {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, + {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, + {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, + {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, + {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, + {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, + {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, + {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, + {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, + {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, + {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, + {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, + {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, + {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, + {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, + {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, + {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, + {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, + {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, + {file = 
"numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, + {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, + {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, + {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, + {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, + {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, + {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, + {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, ] [[package]] @@ -1337,13 +1353,13 @@ files = [ [[package]] name = "platformdirs" -version = "3.9.1" +version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, - {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.extras] @@ -1367,22 +1383,23 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "polars" -version = "0.18.7" +version = "0.19.11" description = "Blazingly fast DataFrame library" optional = false python-versions = ">=3.8" files = [ - {file = "polars-0.18.7-cp38-abi3-macosx_10_7_x86_64.whl", hash = "sha256:be41be31b8771df2beb70d34ecc51a3c4a1133124ff6c7ff5fcaf7b32ff17365"}, - {file = "polars-0.18.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:dc42a28a8a5123278993aad420c44333c453ce83acb2e94b0b768aa0c80f4191"}, - {file = "polars-0.18.7-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3771aff97a47fc50e8b2485c89f29dfd884f8b1cd72b4c3afbad2ba357501ab"}, - {file = "polars-0.18.7-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc41ca5c4e208784586a2a3a98c302e0c6e00254c59b043972165b95228cf420"}, - {file = "polars-0.18.7-cp38-abi3-win_amd64.whl", hash = "sha256:1f509e06297829e16de04fd134b46bba40d0e86887b1573a7c49757b1ebf9048"}, - {file = "polars-0.18.7.tar.gz", hash = "sha256:dbf0bdc0ec56d22041436a98b626f0667af6e700e3f4d5639c3c4f06eddf9e36"}, + {file = "polars-0.19.11-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:de8158e5f09346ec4622057b7afa7e5339eed61c3c3e874b469c9cb27339df51"}, + {file = "polars-0.19.11-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c76c2107260a1ca8a57f02d77ea12dc4db2090d7404b814570474db0392ecf6b"}, + {file = 
"polars-0.19.11-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c6cf2aa2d301230a80277f8711646453b89eadd6058baf30b7104f420daad2"}, + {file = "polars-0.19.11-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ac2890c96736dee83335b1f0b403233aba18b86760505e81eb9f96112afc55d"}, + {file = "polars-0.19.11-cp38-abi3-win_amd64.whl", hash = "sha256:95be83cb0bbd2d608849e24a973ea3135bd25ae6ce7168e31ad25a02e7773122"}, + {file = "polars-0.19.11.tar.gz", hash = "sha256:156eab31d9f9bac218bbd391559c667848372a5c584472784695e4fac087fd5b"}, ] [package.extras] adbc = ["adbc_driver_sqlite"] -all = ["polars[adbc,connectorx,deltalake,fsspec,matplotlib,numpy,pandas,pyarrow,pydantic,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] +all = ["polars[adbc,cloudpickle,connectorx,deltalake,fsspec,gevent,matplotlib,numpy,pandas,pyarrow,pydantic,pyiceberg,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] +cloudpickle = ["cloudpickle"] connectorx = ["connectorx"] deltalake = ["deltalake (>=0.10.0)"] fsspec = ["fsspec"] @@ -1391,6 +1408,8 @@ numpy = ["numpy (>=1.16.0)"] pandas = ["pandas", "pyarrow (>=7.0.0)"] pyarrow = ["pyarrow (>=7.0.0)"] pydantic = ["pydantic"] +pyiceberg = ["pyiceberg (>=0.5.0)"] +pyxlsb = ["pyxlsb (>=1.0)"] sqlalchemy = ["pandas", "sqlalchemy"] timezone = ["backports.zoneinfo", "tzdata"] xlsx2csv = ["xlsx2csv (>=0.8.0)"] @@ -1446,18 +1465,18 @@ files = [ [[package]] name = "pydantic" -version = "2.0.3" +version = "2.4.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-2.0.3-py3-none-any.whl", hash = "sha256:614eb3321eb600c81899a88fa9858b008e3c79e0d4f1b49ab1f516b4b0c27cfb"}, - {file = "pydantic-2.0.3.tar.gz", hash = "sha256:94f13e0dcf139a5125e88283fc999788d894e14ed90cf478bcc2ee50bd4fc630"}, + {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, + {file = "pydantic-2.4.2.tar.gz", hash = 
"sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.3.0" +pydantic-core = "2.10.1" typing-extensions = ">=4.6.1" [package.extras] @@ -1465,112 +1484,117 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.3.0" +version = "2.10.1" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic_core-2.3.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:4542c98b8364b976593703a2dda97377433b102f380b61bc3a2cbc2fbdae1d1f"}, - {file = "pydantic_core-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9342de50824b40f55d2600f66c6f9a91a3a24851eca39145a749a3dc804ee599"}, - {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:539432f911686cb80284c30b33eaf9f4fd9a11e1111fe0dc98fdbdce69b49821"}, - {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38a0e7ee65c8999394d92d9c724434cb629279d19844f2b69d9bbc46dc8b8b61"}, - {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_armv7l.whl", hash = "sha256:e3ed6834cc005798187a56c248a2240207cb8ffdda1c89e9afda4c3d526c2ea0"}, - {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:e72ac299a6bf732a60852d052acf3999d234686755a02ba111e85e7ebf8155b1"}, - {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_s390x.whl", hash = "sha256:616b3451b05ca63b8f433c627f68046b39543faeaa4e50d8c6699a2a1e4b85a5"}, - {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:adcb9c8848e15c613e483e0b99767ae325af27fe0dbd866df01fe5849d06e6e1"}, - {file = "pydantic_core-2.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:464bf799b422be662e5e562e62beeffc9eaa907d381a9d63a2556615bbda286d"}, - {file = "pydantic_core-2.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:4638ebc17de08c2f3acba557efeb6f195c88b7299d8c55c0bb4e20638bbd4d03"}, - {file = "pydantic_core-2.3.0-cp310-none-win32.whl", hash = "sha256:9ff322c7e1030543d35d83bb521b69114d3d150750528d7757544f639def9ad6"}, - {file = "pydantic_core-2.3.0-cp310-none-win_amd64.whl", hash = "sha256:4824eb018f0a4680b1e434697a9bf3f41c7799b80076d06530cbbd212e040ccc"}, - {file = "pydantic_core-2.3.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:0aa429578e23885b3984c49d687cd05ab06f0b908ea1711a8bf7e503b7f97160"}, - {file = "pydantic_core-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20d710c1f79af930b8891bcebd84096798e4387ab64023ef41521d58f21277d3"}, - {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:309f45d4d7481d6f09cb9e35c72caa0e50add4a30bb08c04c5fe5956a0158633"}, - {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bcfb7be905aa849bd882262e1df3f75b564e2f708b4b4c7ad2d3deaf5410562"}, - {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_armv7l.whl", hash = "sha256:85cd9c0af34e371390e3cb2f3a470b0b40cc07568c1e966c638c49062be6352d"}, - {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_ppc64le.whl", hash = "sha256:37c5028cebdf731298724070838fb3a71ef1fbd201d193d311ac2cbdbca25a23"}, - {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_s390x.whl", hash = "sha256:e4208f23f12d0ad206a07a489ef4cb15722c10b62774c4460ee4123250be938e"}, - {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c24465dd11b65c8510f251b095fc788c7c91481c81840112fe3f76c30793a455"}, - {file = "pydantic_core-2.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3cd7ee8bbfab277ab56e272221886fd33a1b5943fbf45ae9195aa6a48715a8a0"}, - {file = "pydantic_core-2.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0fc7e0b056b66cc536e97ef60f48b3b289f6b3b62ac225afd4b22a42434617bf"}, - {file = "pydantic_core-2.3.0-cp311-none-win32.whl", 
hash = "sha256:4788135db4bd83a5edc3522b11544b013be7d25b74b155e08dd3b20cd6663bbb"}, - {file = "pydantic_core-2.3.0-cp311-none-win_amd64.whl", hash = "sha256:f93c867e5e85584a28c6a6feb6f2086d717266eb5d1210d096dd717b7f4dec04"}, - {file = "pydantic_core-2.3.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:73f62bb7fd862d9bcd886e10612bade6fe042eda8b47e8c129892bcfb7b45e84"}, - {file = "pydantic_core-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d889d498fce64bfcd8adf1a78579a7f626f825cbeb2956a24a29b35f9a1df32"}, - {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d55e38a89ec2ae17b2fa7ffeda6b70f63afab1888bd0d57aaa7b7879760acb4"}, - {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1aefebb506bc1fe355d91d25f12bcdea7f4d7c2d9f0f6716dd025543777c99a5"}, - {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_armv7l.whl", hash = "sha256:6441a29f42585f085db0c04cd0557d4cbbb46fa68a0972409b1cfe9f430280c1"}, - {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_ppc64le.whl", hash = "sha256:47e8f034be31390a8f525431eb5e803a78ce7e2e11b32abf5361a972e14e6b61"}, - {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_s390x.whl", hash = "sha256:ad814864aba263be9c83ada44a95f72d10caabbf91589321f95c29c902bdcff0"}, - {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9eff3837d447fccf2ac38c259b14ab9cbde700df355a45a1f3ff244d5e78f8b6"}, - {file = "pydantic_core-2.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:534f3f63c000f08050c6f7f4378bf2b52d7ba9214e9d35e3f60f7ad24a4d6425"}, - {file = "pydantic_core-2.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ef6a222d54f742c24f6b143aab088702db3a827b224e75b9dd28b38597c595fe"}, - {file = "pydantic_core-2.3.0-cp312-none-win32.whl", hash = "sha256:4e26944e64ecc1d7b19db954c0f7b471f3b141ec8e1a9f57cfe27671525cd248"}, - {file = 
"pydantic_core-2.3.0-cp312-none-win_amd64.whl", hash = "sha256:019c5c41941438570dfc7d3f0ae389b2425add1775a357ce1e83ed1434f943d6"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:27c1bbfb9d84a75cf33b7f19b53c29eb7ead99b235fce52aced5507174ab8f98"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:7cb496e934b71f1ade844ab91d6ccac78a3520e5df02fdb2357f85a71e541e69"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af2d43b1978958d91351afbcc9b4d0cfe144c46c61740e82aaac8bb39ab1a4d"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3097c39d7d4e8dba2ef86de171dcccad876c36d8379415ba18a5a4d0533510"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_armv7l.whl", hash = "sha256:dd3b023f3317dbbbc775e43651ce1a31a9cea46216ad0b5be37afc18a2007699"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:27babb9879bf2c45ed655d02639f4c30e2b9ef1b71ce59c2305bbf7287910a18"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_s390x.whl", hash = "sha256:2183a9e18cdc0de53bdaa1675f237259162abeb62d6ac9e527c359c1074dc55d"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c089d8e7f1b4db08b2f8e4107304eec338df046275dad432635a9be9531e2fc8"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f10aa5452b865818dd0137f568d443f5e93b60a27080a01aa4b7512c7ba13a3"}, - {file = "pydantic_core-2.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f642313d559f9d9a00c4de6820124059cc3342a0d0127b18301de2c680d5ea40"}, - {file = "pydantic_core-2.3.0-cp37-none-win32.whl", hash = "sha256:45327fc57afbe3f2c3d7f54a335d5cecee8a9fdb3906a2fbed8af4092f4926df"}, - {file = "pydantic_core-2.3.0-cp37-none-win_amd64.whl", hash = "sha256:e427b66596a6441a5607dfc0085b47d36073f88da7ac48afd284263b9b99e6ce"}, - {file = 
"pydantic_core-2.3.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:0b3d781c71b8bfb621ef23b9c874933e2cd33237c1a65cc20eeb37437f8e7e18"}, - {file = "pydantic_core-2.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad46027dbd5c1db87dc0b49becbe23093b143a20302028d387dae37ee5ef95f5"}, - {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39aa09ed7ce2a648c904f79032d16dda29e6913112af8465a7bf710eef23c7ca"}, - {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b4bf8c58409586a7a04c858a86ab10f28c6c1a7c33da65e0326c59d5b0ab16"}, - {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_armv7l.whl", hash = "sha256:ba2b807d2b62c446120906b8580cddae1d76d3de4efbb95ccc87f5e35c75b4b2"}, - {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:ea955e4ed21f4bbb9b83fea09fc6af0bed82e69ecf6b35ec89237a0a49633033"}, - {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_s390x.whl", hash = "sha256:06884c07956526ac9ebfef40fe21a11605569b8fc0e2054a375fb39c978bf48f"}, - {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f868e731a18b403b88aa434d960489ceeed0ddeb44ebc02389540731a67705e0"}, - {file = "pydantic_core-2.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cb08fab0fc1db15c277b72e33ac74ad9c0c789413da8984a3eacb22a94b42ef4"}, - {file = "pydantic_core-2.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6ca34c29fbd6592de5fd39e80c1993634d704c4e7e14ba54c87b2c7c53da68fe"}, - {file = "pydantic_core-2.3.0-cp38-none-win32.whl", hash = "sha256:cd782807d35c8a41aaa7d30b5107784420eefd9fdc1c760d86007d43ae00b15d"}, - {file = "pydantic_core-2.3.0-cp38-none-win_amd64.whl", hash = "sha256:01f56d5ee70b1d39c0fd08372cc5142274070ab7181d17c86035f130eebc05b8"}, - {file = "pydantic_core-2.3.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:78b1ac0151271ce62bc2b33755f1043eda6a310373143a2f27e2bcd3d5fc8633"}, - {file = 
"pydantic_core-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:64bfd2c35a2c350f73ac52dc134d8775f93359c4c969280a6fe5301b5b6e7431"}, - {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:937c0fe9538f1212b62df6a68f8d78df3572fe3682d9a0dd8851eac8a4e46063"}, - {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d965c7c4b40d1cedec9188782e98bd576f9a04868835604200c3a6e817b824f"}, - {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_armv7l.whl", hash = "sha256:ad442b8585ed4a3c2d22e4bf7b465d9b7d281e055b09719a8aeb5b576422dc9b"}, - {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:4bf20c9722821fce766e685718e739deeccc60d6bc7be5029281db41f999ee0c"}, - {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_s390x.whl", hash = "sha256:f3dd5333049b5b3faa739e0f40b77cc8b7a1aded2f2da0e28794c81586d7b08a"}, - {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dc5f516b24d24bc9e8dd9305460899f38302b3c4f9752663b396ef9848557bf"}, - {file = "pydantic_core-2.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:055f7ea6b1fbb37880d66d70eefd22dd319b09c79d2cb99b1dbfeb34b653b0b2"}, - {file = "pydantic_core-2.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:af693a89db6d6ac97dd84dd7769b3f2bd9007b578127d0e7dda03053f4d3b34b"}, - {file = "pydantic_core-2.3.0-cp39-none-win32.whl", hash = "sha256:f60e31e3e15e8c294bf70c60f8ae4d0c3caf3af8f26466e9aa8ea4c01302749b"}, - {file = "pydantic_core-2.3.0-cp39-none-win_amd64.whl", hash = "sha256:2b79f3681481f4424d7845cc7a261d5a4baa810d656b631fa844dc9967b36a7b"}, - {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:a666134b41712e30a71afaa26deeb4da374179f769fa49784cdf0e7698880fab"}, - {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1c119e9227487ad3d7c3c737d896afe548a6be554091f9745da1f4b489c40561"}, - {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73929a2fb600a2333fce2efd92596cff5e6bf8946e20e93c067b220760064862"}, - {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:41bbc2678a5b6a19371b2cb51f30ccea71f0c14b26477d2d884fed761cea42c7"}, - {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dcbff997f47d45bf028bda4c3036bb3101e89a3df271281d392b6175f71c71d1"}, - {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:afa8808159169368b66e4fbeafac6c6fd8f26246dc4d0dcc2caf94bd9cf1b828"}, - {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12be3b5f54f8111ca38e6b7277f26c23ba5cb3344fae06f879a0a93dfc8b479e"}, - {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ed5babdcd3d052ba5cf8832561f18df20778c7ccf12587b2d82f7bf3bf259a0e"}, - {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d642e5c029e2acfacf6aa0a7a3e822086b3b777c70d364742561f9ca64c1ffc"}, - {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba3073eb38a1294e8c7902989fb80a7a147a69db2396818722bd078476586a0"}, - {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5146a6749b1905e04e62e0ad4622f079e5582f8b3abef5fb64516c623127908"}, - {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:deeb64335f489c3c11949cbd1d1668b3f1fb2d1c6a5bf40e126ef7bf95f9fa40"}, - {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:31acc37288b8e69e4849f618c3d5cf13b58077c1a1ff9ade0b3065ba974cd385"}, - {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:e09d9f6d722de9d4c1c5f122ea9bc6b25a05f975457805af4dcab7b0128aacbf"}, - {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ba6a8cf089222a171b8f84e6ec2d10f7a9d14f26be3a347b14775a8741810676"}, - {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1fd1b24e9bcddcb168437686677104e205c8e25b066e73ffdf331d3bb8792b"}, - {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eda1a89c4526826c0a87d33596a4cd15b8f58e9250f503e39af1699ba9c878e8"}, - {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3e9a18401a28db4358da2e191508702dbf065f2664c710708cdf9552b9fa50c"}, - {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a439fd0d45d51245bbde799726adda5bd18aed3fa2b01ab2e6a64d6d13776fa3"}, - {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:bf6a1d2c920cc9528e884850a4b2ee7629e3d362d5c44c66526d4097bbb07a1a"}, - {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e33fcbea3b63a339dd94de0fc442fefacfe681cc7027ce63f67af9f7ceec7422"}, - {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:bf3ed993bdf4754909f175ff348cf8f78d4451215b8aa338633f149ca3b1f37a"}, - {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7584171eb3115acd4aba699bc836634783f5bd5aab131e88d8eeb8a3328a4a72"}, - {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1624baa76d1740711b2048f302ae9a6d73d277c55a8c3e88b53b773ebf73a971"}, - {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:06f33f695527f5a86e090f208978f9fd252c9cfc7e869d3b679bd71f7cb2c1fa"}, - {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", 
hash = "sha256:7ecf0a67b212900e92f328181fed02840d74ed39553cdb38d27314e2b9c89dfa"}, - {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:45fa1e8ad6f4367ad73674ca560da8e827cc890eaf371f3ee063d6d7366a207b"}, - {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8d0dbcc57839831ae79fd24b1b83d42bc9448d79feaf3ed3fb5cbf94ffbf3eb7"}, - {file = "pydantic_core-2.3.0.tar.gz", hash = "sha256:5cfb5ac4e82c47d5dc25b209dd4c3989e284b80109f9e08b33c895080c424b4f"}, + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, + {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, + {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, + {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, + {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, + {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, + {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, + {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, + {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, + {file = 
"pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, + {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, + {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, + {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, + {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, + {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, + {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, + {file 
= "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, + {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, ] 
[package.dependencies] @@ -1603,13 +1627,13 @@ plugins = ["importlib-metadata"] [[package]] name = "pyright" -version = "1.1.317" +version = "1.1.333" description = "Command line wrapper for pyright" optional = false python-versions = ">=3.7" files = [ - {file = "pyright-1.1.317-py3-none-any.whl", hash = "sha256:9cf24f83fe8f2cf00773068e06ce771f03331590c7d5e771546f81d7f60efaba"}, - {file = "pyright-1.1.317.tar.gz", hash = "sha256:74da4d3e2dcfe66a2d1d1001e16431ec17aac0ad35b03c0410f7379c2cb5c7f0"}, + {file = "pyright-1.1.333-py3-none-any.whl", hash = "sha256:f0a7b7b0cac11c396b17ef3cf6c8527aca1269edaf5cf8203eed7d6dd1ef52aa"}, + {file = "pyright-1.1.333.tar.gz", hash = "sha256:1c49b0029048120c4378f3baf6c1dcbbfb221678bb69654fe773c514430ac53c"}, ] [package.dependencies] @@ -1621,13 +1645,13 @@ dev = ["twine (>=3.4.1)"] [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -1711,6 +1735,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1718,8 +1743,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1736,6 +1768,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1743,6 +1776,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1790,20 +1824,20 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruamel-yaml" -version = "0.17.32" +version = "0.18.2" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = true python-versions = ">=3" files = [ - {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, - {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, + {file = "ruamel.yaml-0.18.2-py3-none-any.whl", hash = "sha256:92076ac8a83dbf44ca661dbed3c935229c8cbc2f10b05959dd3bd5292d8353d3"}, + {file = "ruamel.yaml-0.18.2.tar.gz", hash = "sha256:9bce33f7a814cea4c29a9c62fe872d2363d6220b767891d956eacea8fa5e6fe8"}, ] [package.dependencies] "ruamel.yaml.clib" = 
{version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} [package.extras] -docs = ["ryd"] +docs = ["mercurial (>5.7)", "ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] @@ -1813,43 +1847,42 @@ description = "C version of reader, parser and emitter for ruamel.yaml derived f optional = true python-versions = ">=3.5" files = [ - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, - {file = 
"ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = 
"sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, - {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = 
"sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = 
"sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, ] [[package]] @@ -2035,13 +2068,13 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] [[package]] name = "sphinx-tabs" -version = "3.4.1" +version = "3.4.4" description = "Tabbed views for Sphinx" optional = true python-versions = "~=3.7" files = [ - {file = "sphinx-tabs-3.4.1.tar.gz", hash = "sha256:d2a09f9e8316e400d57503f6df1c78005fdde220e5af589cc79d493159e1b832"}, - {file = "sphinx_tabs-3.4.1-py3-none-any.whl", hash = "sha256:7cea8942aeccc5d01a995789c01804b787334b55927f29b36ba16ed1e7cb27c6"}, + {file = "sphinx-tabs-3.4.4.tar.gz", hash = "sha256:f1b72c4f23d1ba9cdcaf880fd883524bc70689f561b9785719b8b3c3c5ed0aca"}, + {file = "sphinx_tabs-3.4.4-py3-none-any.whl", hash = "sha256:85939b689a0b0a24bf0da418b9acf14b0b0fca7a7a5cd35461ee452a2d4e716b"}, ] [package.dependencies] @@ -2051,7 +2084,7 @@ sphinx = "*" [package.extras] code-style = ["pre-commit (==2.13.0)"] -testing = ["bs4", "coverage", "pygments", "pytest (>=7.1,<8)", "pytest-cov", "pytest-regressions", "rinohtype", "sphinx-testing"] +testing = ["bs4", "coverage", "pygments", "pytest (>=7.1,<8)", "pytest-cov", "pytest-regressions", "rinohtype"] [[package]] name = "sphinx-toolbox" @@ -2273,24 +2306,24 @@ files = [ [[package]] name = "types-setuptools" -version = "68.0.0.2" +version = "68.2.0.0" description = "Typing stubs for setuptools" optional = false python-versions = "*" files = [ - {file = "types-setuptools-68.0.0.2.tar.gz", hash = "sha256:fede8b46862dd9fe68a12f11a8444c3d240d11178eba7d584d6f22ca3114b894"}, - {file = "types_setuptools-68.0.0.2-py3-none-any.whl", hash = "sha256:311a14819416716029d1113c7452143e2fa857e6cc19186bb6830aff69379c48"}, + {file = "types-setuptools-68.2.0.0.tar.gz", hash = "sha256:a4216f1e2ef29d089877b3af3ab2acf489eb869ccaf905125c69d2dc3932fd85"}, + {file = "types_setuptools-68.2.0.0-py3-none-any.whl", hash = 
"sha256:77edcc843e53f8fc83bb1a840684841f3dc804ec94562623bfa2ea70d5a2ba1b"}, ] [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -2306,13 +2339,13 @@ files = [ [[package]] name = "urllib3" -version = "2.0.3" +version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, - {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, ] [package.extras] @@ -2400,17 +2433,17 @@ tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "p [[package]] name = "zipp" -version = "3.16.2" +version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = true python-versions = ">=3.8" files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [extras] @@ -2422,4 +2455,4 @@ pandas = ["pandas"] [metadata] lock-version = "2.0" python-versions = "^3.8" 
-content-hash = "1c56ba4697b6ba940b943a89b3da11a6723caddf58f4cd828a19dd6e3e3b48cf" +content-hash = "da489708a531a87851eba1b1cebd18e12115acc0cf2e41bf56b123d0d2928a55" diff --git a/pyproject.toml b/pyproject.toml index c917137..c0b4261 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ keywords = ["validation", "dataframe"] [tool.poetry.dependencies] python = "^3.8" pydantic = ">=2.0.0" -polars = ">=0.18.3" +polars = ">=0.19.0" # Required for typing.Literal in python3.7 typing-extensions = "*" pandas = {version = "*", optional = true, python = "^3.8"} diff --git a/src/patito/__init__.py b/src/patito/__init__.py index e9b4f7f..a71b314 100644 --- a/src/patito/__init__.py +++ b/src/patito/__init__.py @@ -2,7 +2,7 @@ from polars import Expr, Series, col from patito import exceptions, sql -from patito.exceptions import ValidationError +# from patito.exceptions import ValidationError from patito.polars import DataFrame, LazyFrame from patito.pydantic import Field, Model @@ -16,7 +16,7 @@ "LazyFrame", "Model", "Series", - "ValidationError", + # "ValidationError", "_CACHING_AVAILABLE", "_DUCKDB_AVAILABLE", "col", diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index b137512..105af29 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -1,11 +1,27 @@ -"""Module containing all custom exceptions raised by patito.""" +import json +from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union, Callable, TypedDict, Iterable + +if TYPE_CHECKING: + from pydantic import BaseModel + + Loc = Tuple[Union[int, str], ...] + + class _ErrorDictRequired(TypedDict): + loc: Loc + msg: str + type: str + + class ErrorDict(_ErrorDictRequired, total=False): + ctx: Dict[str, Any] + + Loc = Tuple[Union[int, str], ...] 
+ ReprArgs = Sequence[Tuple[Optional[str], Any]] + RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] + + +__all__ = 'ErrorWrapper', 'ValidationError' -from typing import Any, Callable, Generator, Iterable, Optional, Sequence, Tuple, Union -from pydantic import ValidationError as ValidationError -Loc = Tuple[Union[int, str], ...] -ReprArgs = Sequence[Tuple[Optional[str], Any]] -RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] class Representation: """ Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. @@ -66,7 +82,6 @@ def __rich_repr__(self) -> 'RichReprResult': yield name, field_repr class ErrorWrapper(Representation): - """Wrapper for specific column validation error.""" __slots__ = 'exc', '_loc' def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: @@ -81,7 +96,122 @@ def loc_tuple(self) -> 'Loc': def __repr_args__(self) -> 'ReprArgs': return [('exc', self.exc), ('loc', self.loc_tuple())] - + + +# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper] +# but recursive, therefore just use: +ErrorList = Union[Sequence[Any], ErrorWrapper] + + +class DataFrameValidationError(Representation, ValueError): + __slots__ = 'raw_errors', 'model', '_error_cache' + + def __init__(self, errors: Sequence[ErrorList], model: 'BaseModel') -> None: + self.raw_errors = errors + self.model = model + self._error_cache: Optional[List['ErrorDict']] = None + + def errors(self) -> List['ErrorDict']: + if self._error_cache is None: + self._error_cache = list(flatten_errors(self.raw_errors)) + return self._error_cache + + # def json(self, *, indent: Union[None, int, str] = 2) -> str: + # return json.dumps(self.errors(), indent=indent, default=pydantic_encoder) + + def __str__(self) -> str: + errors = self.errors() + no_errors = len(errors) + return ( + f'{no_errors} validation error{"" if no_errors == 1 else "s"} for 
{self.model.__name__}\n' + f'{display_errors(errors)}' + ) + + def __repr_args__(self) -> 'ReprArgs': + return [('model', self.model.__name__), ('errors', self.errors())] + + +def display_errors(errors: List['ErrorDict']) -> str: + return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors) + + +def _display_error_loc(error: 'ErrorDict') -> str: + return ' -> '.join(str(e) for e in error['loc']) + + +def _display_error_type_and_ctx(error: 'ErrorDict') -> str: + t = 'type=' + error['type'] + ctx = error.get('ctx') + if ctx: + return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) + else: + return t + + +def flatten_errors( + errors: Sequence[Any], loc: Optional['Loc'] = None +) -> Generator['ErrorDict', None, None]: + for error in errors: + if isinstance(error, ErrorWrapper): + + if loc: + error_loc = loc + error.loc_tuple() + else: + error_loc = error.loc_tuple() + + if isinstance(error.exc, DataFrameValidationError): + yield from flatten_errors(error.exc.raw_errors, error_loc) + else: + yield error_dict(error.exc, error_loc) + elif isinstance(error, list): + yield from flatten_errors(error, loc=loc) + else: + raise RuntimeError(f'Unknown error object: {error}') + + +def error_dict(exc: Exception, loc: 'Loc') -> 'ErrorDict': + type_ = get_exc_type(exc.__class__) + msg_template = getattr(exc, 'msg_template', None) + ctx = exc.__dict__ + if msg_template: + msg = msg_template.format(**ctx) + else: + msg = str(exc) + + d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_} + + if ctx: + d['ctx'] = ctx + + return d + + +_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} + + +def get_exc_type(cls: Type[Exception]) -> str: + # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up + try: + return _EXC_TYPE_CACHE[cls] + except KeyError: + r = _get_exc_type(cls) + _EXC_TYPE_CACHE[cls] = r + return r + + +def _get_exc_type(cls: Type[Exception]) -> str: + if issubclass(cls, 
AssertionError): + return 'assertion_error' + + base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error' + if cls in (TypeError, ValueError): + # just TypeError or ValueError, no extra code + return base_name + + # if it's not a TypeError or ValueError, we just take the lowercase of the exception name + # no chaining or snake case logic, use "code" for more complex error types. + code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower() + return base_name + '.' + code class WrongColumnsError(TypeError): """Validation exception for column name mismatches.""" diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 44a7970..c40bd8f 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -16,13 +16,14 @@ TypeVar, Union, cast, + Literal, + get_args ) import polars as pl from polars.datatypes import PolarsDataType from pydantic import ConfigDict, BaseModel, Field, create_model # noqa: F401 from pydantic._internal._model_construction import ModelMetaclass as PydanticModelMetaclass -from typing_extensions import Literal, get_args from patito.polars import DataFrame, LazyFrame from patito.validators import validate @@ -58,6 +59,14 @@ "boolean": pl.Boolean, } +PYTHON_TO_PYDANTIC_TYPES = { + str: "string", + int: "integer", + float: "number", + bool: "boolean", + type(None): "null", +} + class ModelMetaclass(PydanticModelMetaclass): """ @@ -171,16 +180,7 @@ def valid_dtypes( # type: ignore valid_dtypes = {} for column, props in cls._schema_properties().items(): column_dtypes: List[Union[PolarsDataType, pl.List]] - if props.get("type") == "array": - array_props = props["items"] - item_dtypes = cls._valid_dtypes(props=array_props) - if item_dtypes is None: - raise NotImplementedError( - f"No valid dtype mapping found for column '{column}'." 
- ) - column_dtypes = [pl.List(dtype) for dtype in item_dtypes] - else: - column_dtypes = cls._valid_dtypes(props=props) # pyright: ignore + column_dtypes = cls._valid_dtypes(column, props=props) # pyright: ignore if column_dtypes is None: raise NotImplementedError( @@ -190,8 +190,10 @@ def valid_dtypes( # type: ignore return valid_dtypes - @staticmethod + @classmethod def _valid_dtypes( # noqa: C901 + cls: Type[ModelType], + column: str, props: Dict, ) -> Optional[List[pl.PolarsDataType]]: """ @@ -203,6 +205,14 @@ def _valid_dtypes( # noqa: C901 Returns: List of valid dtypes. None if no mapping exists. """ + if props.get("type") == "array": + array_props = props["items"] + item_dtypes = cls._valid_dtypes(column, array_props) + if item_dtypes is None: + raise NotImplementedError( + f"No valid dtype mapping found for column '{column}'." + ) + return [pl.List(dtype) for dtype in item_dtypes] if "dtype" in props: return [ props["dtype"], @@ -210,6 +220,11 @@ def _valid_dtypes( # noqa: C901 elif "enum" in props and props["type"] == "string": return [pl.Categorical, pl.Utf8] elif "type" not in props: + if 'anyOf' in props: + res = [cls._valid_dtypes(column, sub_props) for sub_props in props['anyOf']] + return list(itertools.chain.from_iterable(res)) + elif 'const' in props: + return cls._valid_dtypes(column, {'type': PYTHON_TO_PYDANTIC_TYPES.get(type(props['const']))}) return None elif props["type"] == "integer": return [ @@ -460,7 +475,7 @@ def non_nullable_columns( # type: ignore >>> sorted(MyModel.non_nullable_columns) ['another_non_nullable_field', 'non_nullable_field'] """ - return set(cls.model_json_schema().get("required", {})) + return set(k for k in cls.valid_dtypes.keys() if pl.Null not in cls.valid_dtypes[k]) @property def nullable_columns( # type: ignore @@ -1341,7 +1356,7 @@ def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: fields = {} for field_name, field_info in schema["properties"].items(): if "$ref" in field_info: - definition = 
schema["definitions"][field_info["$ref"]] + definition = schema["$defs"][field_info["$ref"]] if "enum" in definition and "type" not in definition: enum_types = set(type(value) for value in definition["enum"]) if len(enum_types) > 1: @@ -1353,17 +1368,13 @@ def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: ) enum_type = enum_types.pop() # TODO: Support time-delta, date, and date-time. - definition["type"] = { - str: "string", - int: "integer", - float: "number", - bool: "boolean", - type(None): "null", - }[enum_type] + definition["type"] = PYTHON_TO_PYDANTIC_TYPES[enum_type] fields[field_name] = definition else: fields[field_name] = field_info fields[field_name]["required"] = field_name in required + if 'const' in field_info and 'type' not in field_info: + fields[field_name]['type'] = PYTHON_TO_PYDANTIC_TYPES[type(field_info['const'])] return fields diff --git a/src/patito/validators.py b/src/patito/validators.py index 776378e..af873aa 100644 --- a/src/patito/validators.py +++ b/src/patito/validators.py @@ -14,7 +14,7 @@ MissingValuesError, RowValueError, SuperflousColumnsError, - ValidationError, + DataFrameValidationError, ) if sys.version_info >= (3, 10): # pragma: no cover @@ -115,11 +115,11 @@ def _find_errors( # noqa: C901 """ errors: list[ErrorWrapper] = [] # Check if any columns are missing - for missig_column in set(schema.columns) - set(dataframe.columns): + for missing_column in set(schema.columns) - set(dataframe.columns): errors.append( ErrorWrapper( MissingColumnsError("Missing column"), - loc=missig_column, + loc=missing_column, ) ) @@ -241,8 +241,8 @@ def _find_errors( # noqa: C901 "multipleOf": lambda v: (col == 0) | ((col % v) == 0), "const": lambda v: col == v, "pattern": lambda v: col.str.contains(v), - "minLength": lambda v: col.str.lengths() >= v, - "maxLength": lambda v: col.str.lengths() <= v, + "minLength": lambda v: col.str.len_chars() >= v, + "maxLength": lambda v: col.str.len_chars() <= v, } checks = [ 
check(column_properties[key]) @@ -315,5 +315,5 @@ def validate( errors = _find_errors(dataframe=polars_dataframe, schema=schema) if errors: - raise ValidationError(errors=errors, model=schema) + raise DataFrameValidationError(errors=errors, model=schema) diff --git a/tests/test_model.py b/tests/test_model.py index 1ee8334..78a3fcd 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -3,12 +3,11 @@ import enum import re from datetime import date, datetime, timedelta -from typing import List, Optional, Type +from typing import List, Optional, Type, Literal import polars as pl import pytest from pydantic import ValidationError -from typing_extensions import Literal import patito as pt diff --git a/tests/test_validators.py b/tests/test_validators.py index 8293e42..8daa919 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -9,7 +9,7 @@ from typing_extensions import Literal import patito as pt -from patito.exceptions import ValidationError +from patito.exceptions import DataFrameValidationError from patito.validators import _dewrap_optional, _is_optional, validate @@ -55,7 +55,7 @@ class SingleColumnModel(pt.Model): column_2: str # First we raise an error because we are missing column_1 - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: validate(dataframe=pl.DataFrame(), schema=SingleColumnModel) errors = e_info.value.errors() @@ -81,7 +81,7 @@ class SingleColumnModel(pt.Model): column_1: int # We raise an error because we have added column_2 - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: validate( dataframe=pl.DataFrame().with_columns( [ @@ -106,7 +106,7 @@ def test_validate_non_nullable_columns(): class SmallModel(pt.Model): column_1: int - column_2: Optional[int] + column_2: Optional[int] = None # We insert nulls into a non-optional column, causing an exception wrong_nulls_df = pl.DataFrame().with_columns( @@ -115,7 +115,7 
@@ class SmallModel(pt.Model): pl.lit(None).cast(pl.Int64).alias("column_2"), ] ) - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: validate( dataframe=wrong_nulls_df, schema=SmallModel, @@ -146,7 +146,7 @@ class IntModel(pt.Model): for dtype in (pl.Utf8, pl.Date, pl.Float32, pl.Float64): series = pl.Series([], dtype=dtype).alias("column") dataframe = pl.DataFrame([series]) - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: validate(dataframe=dataframe, schema=IntModel) errors = e_info.value.errors() @@ -186,7 +186,7 @@ class CompleteModel(pt.Model): else: dtype = pl.Int64 - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: validate( dataframe=valid_df.with_columns(pl.lit(1, dtype=dtype).alias(column)), schema=CompleteModel, @@ -245,7 +245,7 @@ class MyModel(pt.Model): MyModel.validate(empty_df) duplicated_df = pt.DataFrame({"column": [1, 1, 2]}) - with pytest.raises(pt.exceptions.ValidationError): + with pytest.raises(pt.exceptions.DataFrameValidationError): MyModel.validate(duplicated_df) @@ -270,27 +270,27 @@ class StringModel(pt.Model): c: str validate(dataframe=string_df, schema=StringModel) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): validate(dataframe=date_df, schema=StringModel) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): validate(dataframe=datetime_df, schema=StringModel) class DateModel(pt.Model): c: date validate(dataframe=date_df, schema=DateModel) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): validate(dataframe=string_df, schema=DateModel) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): validate(dataframe=datetime_df, schema=DateModel) class DateTimeModel(pt.Model): c: datetime validate(dataframe=datetime_df, 
schema=DateTimeModel) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): validate(dataframe=string_df, schema=DateTimeModel) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): validate(dataframe=date_df, schema=DateTimeModel) @@ -309,7 +309,7 @@ class EnumModel(pt.Model): validate(dataframe=valid_df, schema=EnumModel) invalid_df = pl.DataFrame({"column": ["d"]}) - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: validate(dataframe=invalid_df, schema=EnumModel) errors = e_info.value.errors() @@ -331,7 +331,7 @@ class EnumModel(pt.Model): validate(dataframe=valid_df, schema=EnumModel) invalid_df = pl.DataFrame({"column": ["d"]}) - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: validate(dataframe=invalid_df, schema=EnumModel) errors = e_info.value.errors() @@ -351,7 +351,7 @@ class UniqueModel(pt.Model): validate(dataframe=pl.DataFrame({"product_id": [1, 2]}), schema=UniqueModel) - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: validate(dataframe=pl.DataFrame({"product_id": [1, 1]}), schema=UniqueModel) errors = e_info.value.errors() @@ -376,7 +376,7 @@ class BoundModel(pt.Model): # const fields should now use Literal instead, but pyright # complains about Literal of float values const_column: Literal[3.1415] = pt.Field(default=3.1415) #type: ignore - regex_column: str = pt.Field(regex=r"value [A-Z]") + regex_column: str = pt.Field(pattern=r"value [A-Z]") min_length_column: str = pt.Field(min_length=2) max_length_column: str = pt.Field(max_length=2) @@ -398,7 +398,7 @@ class BoundModel(pt.Model): + valid[column_index + 1 :] ) invalid_df = pl.DataFrame(data=[data], schema=BoundModel.columns) - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: BoundModel.validate(invalid_df) 
errors = e_info.value.errors() assert len(errors) == 1 @@ -459,7 +459,7 @@ class DTypeModel(pt.Model): + valid[column_index + 1 :] ) invalid_df = pl.DataFrame(data=data, schema=DTypeModel.columns) - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: DTypeModel.validate(invalid_df) errors = e_info.value.errors() assert len(errors) == 1 @@ -475,12 +475,12 @@ def test_custom_constraint_validation(): class CustomConstraintModel(pt.Model): even_int: int = pt.Field( - constraints=[(pl.col("even_int") % 2 == 0).alias("even_constraint")] + json_schema_extra={"constraints": [(pl.col("even_int") % 2 == 0).alias("even_constraint")]} ) odd_int: int = pt.Field(json_schema_extra={"constraints":pl.col("odd_int") % 2 == 1}) df = CustomConstraintModel.DataFrame({"even_int": [2, 3], "odd_int": [1, 2]}) - with pytest.raises(ValidationError) as e_info: + with pytest.raises(DataFrameValidationError) as e_info: df.validate() errors = e_info.value.errors() assert len(errors) == 2 @@ -504,7 +504,7 @@ class PizzaSlice(pt.Model): PizzaSlice.validate(whole_pizza) part_pizza = pt.DataFrame({"fraction": [0.25, 0.25]}) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): PizzaSlice.validate(part_pizza) @@ -519,7 +519,7 @@ class Pair(pt.Model): pairs = pt.DataFrame({"odd_number": [1, 3, 5], "even_number": [2, 4, 6]}) Pair.validate(pairs) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): Pair.validate( pairs.select( [ @@ -599,5 +599,9 @@ class ListModel(pt.Model): ("nullable_int_or_null_list", "nullable_int_list"), ]: # print(old, new) - with pytest.raises(ValidationError): + with pytest.raises(DataFrameValidationError): ListModel.validate(valid_df.with_columns(pl.col(old).alias(new))) + + +if __name__ == "__main__": + test_validation_of_list_dtypes() \ No newline at end of file From 4b98da935bd23f47613fbc6f2bf78cf03ce56a96 Mon Sep 17 00:00:00 2001 From: Brendan Cooley 
Date: Thu, 26 Oct 2023 12:45:05 -0400 Subject: [PATCH 04/29] wip: subclass `FieldInfo`, extend with patito field attributes all `test_validators` now succeed, all but `test_dataframe_get_method` now pass in `test_polars` --- src/patito/polars.py | 2 +- src/patito/pydantic.py | 57 ++++++++++++++++++++++++++++++++++++++-- src/patito/validators.py | 2 +- tests/test_polars.py | 14 +++++----- tests/test_validators.py | 22 ++++++++-------- 5 files changed, 75 insertions(+), 22 deletions(-) diff --git a/src/patito/polars.py b/src/patito/polars.py index fe5bfe9..0727eb2 100644 --- a/src/patito/polars.py +++ b/src/patito/polars.py @@ -684,7 +684,7 @@ def filter( # noqa: D102 pl.Expr, str, pl.Series, list[bool], np.ndarray[Any, Any], bool ], ) -> DF: - return cast(DF, super().filter(predicate=predicate)) + return cast(DF, super().filter(predicate)) def select( # noqa: D102 self: DF, diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index c40bd8f..9376c14 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -17,12 +17,14 @@ Union, cast, Literal, - get_args + get_args, + Sequence, ) import polars as pl from polars.datatypes import PolarsDataType -from pydantic import ConfigDict, BaseModel, Field, create_model # noqa: F401 +from pydantic import fields +from pydantic import ConfigDict, BaseModel, create_model # noqa: F401 from pydantic._internal._model_construction import ModelMetaclass as PydanticModelMetaclass from patito.polars import DataFrame, LazyFrame @@ -1375,6 +1377,10 @@ def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: fields[field_name]["required"] = field_name in required if 'const' in field_info and 'type' not in field_info: fields[field_name]['type'] = PYTHON_TO_PYDANTIC_TYPES[type(field_info['const'])] + for f in get_args(PT_INFO): + v = getattr(cls.model_fields[field_name], f, None) + if v is not None: + fields[field_name][f] = v return fields @@ -1425,6 +1431,53 @@ def _derive_model( __base__=Model, **new_fields, ) + +PT_INFO = 
Literal["constraints", "derived_from", "dtype", "unique"] + +class FieldInfo(fields.FieldInfo): + __slots__ = getattr(fields.FieldInfo, '__slots__') + ( + "constraints", + "derived_from", + "dtype", + "unique", + ) + + def __init__( + self, + constraints: Optional[Union[pl.Expr, Sequence[pl.Expr]]] = None, + derived_from: Optional[Union[str, pl.Expr]] = None, + dtype: Optional[pl.DataType] = None, + unique: bool = False, + **kwargs, + ): + super().__init__(**kwargs) + self.constraints = constraints + self.derived_from = derived_from + self.dtype = dtype + self.unique = unique + + +def Field( + *args, + **kwargs, +): + pt_kwargs = { + k: kwargs.pop(k, None) for k in get_args(PT_INFO) + } + meta_kwargs = { + k: v for k, v in kwargs.items() if k in fields.FieldInfo.metadata_lookup + } + base_kwargs = { + k: v for k, v in kwargs.items() if k not in {**pt_kwargs, **meta_kwargs} + } + finfo = fields.Field( + *args, **base_kwargs + ) + return FieldInfo( + **finfo._attributes_set, + **meta_kwargs, + **pt_kwargs, + ) class FieldDoc: diff --git a/src/patito/validators.py b/src/patito/validators.py index af873aa..357c7ab 100644 --- a/src/patito/validators.py +++ b/src/patito/validators.py @@ -271,7 +271,7 @@ def _find_errors( # noqa: C901 if isinstance(custom_constraints, pl.Expr): custom_constraints = [custom_constraints] constraints = pl.all_horizontal( - [constraint.is_not() for constraint in custom_constraints] + [constraint.not_() for constraint in custom_constraints] ) if "_" in constraints.meta.root_names(): # An underscore is an alias for the current field diff --git a/tests/test_polars.py b/tests/test_polars.py index bbfb762..ee39089 100644 --- a/tests/test_polars.py +++ b/tests/test_polars.py @@ -12,7 +12,7 @@ def test_dataframe_get_method(): """You should be able to retrieve a single row and cast to model.""" class Product(pt.Model): - product_id: int = pt.Field(json_schema_extra={"unique":True}) + product_id: int = pt.Field(unique=True) price: float df = 
pt.DataFrame({"product_id": [1, 2], "price": [9.99, 19.99]}) @@ -112,7 +112,7 @@ def test_dataframe_model_dtype_casting(): class DTypeModel(pt.Model): implicit_int_1: int implicit_int_2: int - explicit_uint: int = pt.Field(json_schema_extra={"dtype":pl.UInt64}) + explicit_uint: int = pt.Field(dtype=pl.UInt64) implicit_date: date implicit_datetime: datetime @@ -203,10 +203,10 @@ def test_derive_functionality(): class DerivedModel(pt.Model): underived: int - const_derived: int = pt.Field(json_schema_extra={"derived_from":pl.lit(3)}) - column_derived: int = pt.Field(json_schema_extra={"derived_from":"underived"}) - expr_derived: int = pt.Field(json_schema_extra={"derived_from":2 * pl.col("underived")}) - second_order_derived: int = pt.Field(json_schema_extra={"derived_from":2 * pl.col("expr_derived")}) + const_derived: int = pt.Field(derived_from=pl.lit(3)) + column_derived: int = pt.Field(derived_from="underived") + expr_derived: int = pt.Field(derived_from=2 * pl.col("underived")) + second_order_derived: int = pt.Field(derived_from=2 * pl.col("expr_derived")) df = DerivedModel.DataFrame({"underived": [1, 2]}) assert df.columns == ["underived"] @@ -224,7 +224,7 @@ class DerivedModel(pt.Model): # Non-compatible derive_from arguments should raise TypeError class InvalidModel(pt.Model): - incompatible: int = pt.Field(json_schema_extra={"derived_from":object}) + incompatible: int = pt.Field(derived_from=object) with pytest.raises( TypeError, diff --git a/tests/test_validators.py b/tests/test_validators.py index 8daa919..0e68e59 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -414,11 +414,11 @@ def test_validation_of_dtype_specifiers(): class DTypeModel(pt.Model): int_column: int - int_explicit_dtype_column: int = pt.Field(json_schema_extra={"dtype":pl.Int64}) - smallint_column: int = pt.Field(json_schema_extra={"dtype":pl.Int8}) - unsigned_int_column: int = pt.Field(json_schema_extra={"dtype":pl.UInt64}) - unsigned_smallint_column: int = 
pt.Field(json_schema_extra={"dtype":pl.UInt8}) - + int_explicit_dtype_column: int = pt.Field(dtype=pl.Int64) + smallint_column: int = pt.Field(dtype=pl.Int8) + unsigned_int_column: int = pt.Field(dtype=pl.UInt64) + unsigned_smallint_column: int = pt.Field(dtype=pl.UInt8) + assert DTypeModel.dtypes == { "int_column": pl.Int64, "int_explicit_dtype_column": pl.Int64, @@ -475,9 +475,9 @@ def test_custom_constraint_validation(): class CustomConstraintModel(pt.Model): even_int: int = pt.Field( - json_schema_extra={"constraints": [(pl.col("even_int") % 2 == 0).alias("even_constraint")]} + constraints=[(pl.col("even_int") % 2 == 0).alias("even_constraint")] ) - odd_int: int = pt.Field(json_schema_extra={"constraints":pl.col("odd_int") % 2 == 1}) + odd_int: int = pt.Field(constraints=pl.col("odd_int") % 2 == 1) df = CustomConstraintModel.DataFrame({"even_int": [2, 3], "odd_int": [1, 2]}) with pytest.raises(DataFrameValidationError) as e_info: @@ -498,7 +498,7 @@ class CustomConstraintModel(pt.Model): # We can also validate aggregation queries class PizzaSlice(pt.Model): - fraction: float = pt.Field(json_schema_extra={"constraints":pl.col("fraction").sum() == 1}) + fraction: float = pt.Field(constraints=pl.col("fraction").sum() == 1) whole_pizza = pt.DataFrame({"fraction": [0.25, 0.75]}) PizzaSlice.validate(whole_pizza) @@ -513,9 +513,9 @@ def test_anonymous_column_constraints(): class Pair(pt.Model): # pl.col("_") refers to the given field column - odd_number: int = pt.Field(json_schema_extra={"constraints":pl.col("_") % 2 == 1}) + odd_number: int = pt.Field(constraints=pl.col("_") % 2 == 1) # pt.field is simply an alias for pl.col("_") - even_number: int = pt.Field(json_schema_extra={"constraints":pt.field % 2 == 0}) + even_number: int = pt.Field(constraints=pt.field % 2 == 0) pairs = pt.DataFrame({"odd_number": [1, 3, 5], "even_number": [2, 4, 6]}) Pair.validate(pairs) @@ -604,4 +604,4 @@ class ListModel(pt.Model): if __name__ == "__main__": - 
test_validation_of_list_dtypes() \ No newline at end of file + test_custom_constraint_validation() \ No newline at end of file From eef74fe415fcf8756013333fa2771b081412acc1 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Thu, 26 Oct 2023 12:45:53 -0400 Subject: [PATCH 05/29] fix: update LDF.collect() for polars==0.19.8 fix: switch to *args/**kwargs --- poetry.lock | 632 ++++++++++++++++++++++--------------------- src/patito/polars.py | 21 +- 2 files changed, 322 insertions(+), 331 deletions(-) diff --git a/poetry.lock b/poetry.lock index f1d4954..e5e4b77 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "alabaster" @@ -27,13 +27,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "apeye" -version = "1.4.0" +version = "1.4.1" description = "Handy tools for working with URLs and APIs." 
optional = true python-versions = ">=3.6.1" files = [ - {file = "apeye-1.4.0-py3-none-any.whl", hash = "sha256:32f10f5629c39a0d2a4bc00b16827b43b912c56510395329cb4cc823954ec2be"}, - {file = "apeye-1.4.0.tar.gz", hash = "sha256:db616f14f1e7c09c5ff76230b6a78ebada6e34bed80596bbb9f1146d94107cdb"}, + {file = "apeye-1.4.1-py3-none-any.whl", hash = "sha256:44e58a9104ec189bf42e76b3a7fe91e2b2879d96d48e9a77e5e32ff699c9204e"}, + {file = "apeye-1.4.1.tar.gz", hash = "sha256:14ea542fad689e3bfdbda2189a354a4908e90aee4bf84c15ab75d68453d76a36"}, ] [package.dependencies] @@ -108,6 +108,9 @@ files = [ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} setuptools = {version = "*", markers = "python_version >= \"3.12\""} +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "bandit" version = "1.7.5" @@ -183,7 +186,7 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -232,13 +235,13 @@ redis = ["redis (>=2.10.5)"] [[package]] name = "certifi" -version = "2023.5.7" +version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] @@ -382,71 +385,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.7" +version = "7.3.2" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = 
"coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - 
{file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = 
"coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = 
"sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = 
"coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = 
"coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + 
{file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] @@ -518,63 +513,50 @@ dates = ["pytz (>=2019.1)"] [[package]] name = "duckdb" -version = "0.8.1" +version = "0.9.1" description = "DuckDB embedded database" optional = true -python-versions = "*" +python-versions = ">=3.7.0" files = [ - {file = "duckdb-0.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:14781d21580ee72aba1f5dcae7734674c9b6c078dd60470a08b2b420d15b996d"}, - {file = "duckdb-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f13bf7ab0e56ddd2014ef762ae4ee5ea4df5a69545ce1191b8d7df8118ba3167"}, - {file = "duckdb-0.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4032042d8363e55365bbca3faafc6dc336ed2aad088f10ae1a534ebc5bcc181"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a71bd8f0b0ca77c27fa89b99349ef22599ffefe1e7684ae2e1aa2904a08684"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24568d6e48f3dbbf4a933109e323507a46b9399ed24c5d4388c4987ddc694fd0"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297226c0dadaa07f7c5ae7cbdb9adba9567db7b16693dbd1b406b739ce0d7924"}, - {file = "duckdb-0.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5792cf777ece2c0591194006b4d3e531f720186102492872cb32ddb9363919cf"}, - {file = "duckdb-0.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:12803f9f41582b68921d6b21f95ba7a51e1d8f36832b7d8006186f58c3d1b344"}, - {file = "duckdb-0.8.1-cp310-cp310-win32.whl", hash = "sha256:d0953d5a2355ddc49095e7aef1392b7f59c5be5cec8cdc98b9d9dc1f01e7ce2b"}, - {file = "duckdb-0.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:6e6583c98a7d6637e83bcadfbd86e1f183917ea539f23b6b41178f32f813a5eb"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fad7ed0d4415f633d955ac24717fa13a500012b600751d4edb050b75fb940c25"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81ae602f34d38d9c48dd60f94b89f28df3ef346830978441b83c5b4eae131d08"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d75cfe563aaa058d3b4ccaaa371c6271e00e3070df5de72361fd161b2fe6780"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbb55e7a3336f2462e5e916fc128c47fe1c03b6208d6bd413ac11ed95132aa0"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6df53efd63b6fdf04657385a791a4e3c4fb94bfd5db181c4843e2c46b04fef5"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b188b80b70d1159b17c9baaf541c1799c1ce8b2af4add179a9eed8e2616be96"}, - {file = "duckdb-0.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ad481ee353f31250b45d64b4a104e53b21415577943aa8f84d0af266dc9af85"}, - {file = "duckdb-0.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1d1b1729993611b1892509d21c21628917625cdbe824a61ce891baadf684b32"}, - {file = "duckdb-0.8.1-cp311-cp311-win32.whl", hash = "sha256:2d8f9cc301e8455a4f89aa1088b8a2d628f0c1f158d4cf9bc78971ed88d82eea"}, - {file = "duckdb-0.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:07457a43605223f62d93d2a5a66b3f97731f79bbbe81fdd5b79954306122f612"}, - {file = "duckdb-0.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2c8062c3e978dbcd80d712ca3e307de8a06bd4f343aa457d7dd7294692a3842"}, - {file = 
"duckdb-0.8.1-cp36-cp36m-win32.whl", hash = "sha256:fad486c65ae944eae2de0d590a0a4fb91a9893df98411d66cab03359f9cba39b"}, - {file = "duckdb-0.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:86fa4506622c52d2df93089c8e7075f1c4d0ba56f4bf27faebde8725355edf32"}, - {file = "duckdb-0.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:60e07a62782f88420046e30cc0e3de842d0901c4fd5b8e4d28b73826ec0c3f5e"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18563675977f8cbf03748efee0165b4c8ef64e0cbe48366f78e2914d82138bb"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16e179443832bea8439ae4dff93cf1e42c545144ead7a4ef5f473e373eea925a"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a413d5267cb41a1afe69d30dd6d4842c588256a6fed7554c7e07dad251ede095"}, - {file = "duckdb-0.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3784680df59eadd683b0a4c2375d451a64470ca54bd171c01e36951962b1d332"}, - {file = "duckdb-0.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:67a1725c2b01f9b53571ecf3f92959b652f60156c1c48fb35798302e39b3c1a2"}, - {file = "duckdb-0.8.1-cp37-cp37m-win32.whl", hash = "sha256:197d37e2588c5ad063e79819054eedb7550d43bf1a557d03ba8f8f67f71acc42"}, - {file = "duckdb-0.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3843feb79edf100800f5037c32d5d5a5474fb94b32ace66c707b96605e7c16b2"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:624c889b0f2d656794757b3cc4fc58030d5e285f5ad2ef9fba1ea34a01dab7fb"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fcbe3742d77eb5add2d617d487266d825e663270ef90253366137a47eaab9448"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47516c9299d09e9dbba097b9fb339b389313c4941da5c54109df01df0f05e78c"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cf1ba718b7522d34399446ebd5d4b9fcac0b56b6ac07bfebf618fd190ec37c1d"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e36e35d38a9ae798fe8cf6a839e81494d5b634af89f4ec9483f4d0a313fc6bdb"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23493313f88ce6e708a512daacad13e83e6d1ea0be204b175df1348f7fc78671"}, - {file = "duckdb-0.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1fb9bf0b6f63616c8a4b9a6a32789045e98c108df100e6bac783dc1e36073737"}, - {file = "duckdb-0.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:12fc13ecd5eddd28b203b9e3999040d3a7374a8f4b833b04bd26b8c5685c2635"}, - {file = "duckdb-0.8.1-cp38-cp38-win32.whl", hash = "sha256:a12bf4b18306c9cb2c9ba50520317e6cf2de861f121d6f0678505fa83468c627"}, - {file = "duckdb-0.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e4e809358b9559c00caac4233e0e2014f3f55cd753a31c4bcbbd1b55ad0d35e4"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7acedfc00d97fbdb8c3d120418c41ef3cb86ef59367f3a9a30dff24470d38680"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:99bfe264059cdc1e318769103f656f98e819cd4e231cd76c1d1a0327f3e5cef8"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:538b225f361066231bc6cd66c04a5561de3eea56115a5dd773e99e5d47eb1b89"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae0be3f71a18cd8492d05d0fc1bc67d01d5a9457b04822d025b0fc8ee6efe32e"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd82ba63b58672e46c8ec60bc9946aa4dd7b77f21c1ba09633d8847ad9eb0d7b"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:780a34559aaec8354e83aa4b7b31b3555f1b2cf75728bf5ce11b89a950f5cdd9"}, - {file = "duckdb-0.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:01f0d4e9f7103523672bda8d3f77f440b3e0155dd3b2f24997bc0c77f8deb460"}, - {file = "duckdb-0.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31f692decb98c2d57891da27180201d9e93bb470a3051fcf413e8da65bca37a5"}, - {file = "duckdb-0.8.1-cp39-cp39-win32.whl", hash = "sha256:e7fe93449cd309bbc67d1bf6f6392a6118e94a9a4479ab8a80518742e855370a"}, - {file = "duckdb-0.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:81d670bc6807672f038332d9bf587037aabdd741b0810de191984325ed307abd"}, - {file = "duckdb-0.8.1.tar.gz", hash = "sha256:a54d37f4abc2afc4f92314aaa56ecf215a411f40af4bffe1e86bd25e62aceee9"}, + {file = "duckdb-0.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6c724e105ecd78c8d86b3c03639b24e1df982392fc836705eb007e4b1b488864"}, + {file = "duckdb-0.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:75f12c5a3086079fb6440122565f1762ef1a610a954f2d8081014c1dd0646e1a"}, + {file = "duckdb-0.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:151f5410c32f8f8fe03bf23462b9604349bc0b4bd3a51049bbf5e6a482a435e8"}, + {file = "duckdb-0.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1d066fdae22b9b711b1603541651a378017645f9fbc4adc9764b2f3c9e9e4a"}, + {file = "duckdb-0.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1de56d8b7bd7a7653428c1bd4b8948316df488626d27e9c388194f2e0d1428d4"}, + {file = "duckdb-0.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1fb6cd590b1bb4e31fde8efd25fedfbfa19a86fa72789fa5b31a71da0d95bce4"}, + {file = "duckdb-0.9.1-cp310-cp310-win32.whl", hash = "sha256:1039e073714d668cef9069bb02c2a6756c7969cedda0bff1332520c4462951c8"}, + {file = "duckdb-0.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:7e6ac4c28918e1d278a89ff26fd528882aa823868ed530df69d6c8a193ae4e41"}, + {file = "duckdb-0.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5eb750f2ee44397a61343f32ee9d9e8c8b5d053fa27ba4185d0e31507157f130"}, + {file = "duckdb-0.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:aea2a46881d75dc069a242cb164642d7a4f792889010fb98210953ab7ff48849"}, + {file = "duckdb-0.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed3dcedfc7a9449b6d73f9a2715c730180056e0ba837123e7967be1cd3935081"}, + {file = "duckdb-0.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c55397bed0087ec4445b96f8d55f924680f6d40fbaa7f2e35468c54367214a5"}, + {file = "duckdb-0.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3261696130f1cfb955735647c93297b4a6241753fb0de26c05d96d50986c6347"}, + {file = "duckdb-0.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:64c04b1728e3e37cf93748829b5d1e028227deea75115bb5ead01c608ece44b1"}, + {file = "duckdb-0.9.1-cp311-cp311-win32.whl", hash = "sha256:12cf9fb441a32702e31534330a7b4d569083d46a91bf185e0c9415000a978789"}, + {file = "duckdb-0.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:fdfd85575ce9540e593d5d25c9d32050bd636c27786afd7b776aae0f6432b55e"}, + {file = "duckdb-0.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704700a4b469e3bb1a7e85ac12e58037daaf2b555ef64a3fe2913ffef7bd585b"}, + {file = "duckdb-0.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf55b303b7b1a8c2165a96e609eb30484bc47481d94a5fb1e23123e728df0a74"}, + {file = "duckdb-0.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b70e23c14746904ca5de316436e43a685eb769c67fe3dbfaacbd3cce996c5045"}, + {file = "duckdb-0.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:77379f7f1f8b4dc98e01f8f6f8f15a0858cf456e2385e22507f3cb93348a88f9"}, + {file = "duckdb-0.9.1-cp37-cp37m-win32.whl", hash = "sha256:92c8f738489838666cae9ef41703f8b16f660bb146970d1eba8b2c06cb3afa39"}, + {file = "duckdb-0.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08c5484ac06ab714f745526d791141f547e2f5ac92f97a0a1b37dfbb3ea1bd13"}, + {file = "duckdb-0.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:f66d3c07c7f6938d3277294677eb7dad75165e7c57c8dd505503fc5ef10f67ad"}, + {file = "duckdb-0.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c38044e5f78c0c7b58e9f937dcc6c34de17e9ca6be42f9f8f1a5a239f7a847a5"}, + {file = "duckdb-0.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73bc0d715b79566b3ede00c367235cfcce67be0eddda06e17665c7a233d6854a"}, + {file = "duckdb-0.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26622c3b4ea6a8328d95882059e3cc646cdc62d267d48d09e55988a3bba0165"}, + {file = "duckdb-0.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3367d10096ff2b7919cedddcf60d308d22d6e53e72ee2702f6e6ca03d361004a"}, + {file = "duckdb-0.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d88a119f1cb41911a22f08a6f084d061a8c864e28b9433435beb50a56b0d06bb"}, + {file = "duckdb-0.9.1-cp38-cp38-win32.whl", hash = "sha256:99567496e45b55c67427133dc916013e8eb20a811fc7079213f5f03b2a4f5fc0"}, + {file = "duckdb-0.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:5b3da4da73422a3235c3500b3fb541ac546adb3e35642ef1119dbcd9cc7f68b8"}, + {file = "duckdb-0.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eca00c0c2062c0265c6c0e78ca2f6a30611b28f3afef062036610e9fc9d4a67d"}, + {file = "duckdb-0.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eb5af8e89d40fc4baab1515787ea1520a6c6cf6aa40ab9f107df6c3a75686ce1"}, + {file = "duckdb-0.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fae3d4f83ebcb47995f6acad7c6d57d003a9b6f0e1b31f79a3edd6feb377443"}, + {file = "duckdb-0.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16b9a7efc745bc3c5d1018c3a2f58d9e6ce49c0446819a9600fdba5f78e54c47"}, + {file = "duckdb-0.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b0b60167f5537772e9f5af940e69dcf50e66f5247732b8bb84a493a9af6055"}, + {file = "duckdb-0.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:4f27f5e94c47df6c4ccddf18e3277b7464eea3db07356d2c4bf033b5c88359b8"}, + {file = "duckdb-0.9.1-cp39-cp39-win32.whl", hash = "sha256:d43cd7e6f783006b59dcc5e40fcf157d21ee3d0c8dfced35278091209e9974d7"}, + {file = "duckdb-0.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:e666795887d9cf1d6b6f6cbb9d487270680e5ff6205ebc54b2308151f13b8cff"}, + {file = "duckdb-0.9.1.tar.gz", hash = "sha256:603a878746015a3f2363a65eb48bcbec816261b6ee8d71eee53061117f6eef9d"}, ] [[package]] @@ -593,18 +575,19 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.12.2" +version = "3.12.4" description = "A platform independent file lock." optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, + {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] +typing = ["typing-extensions (>=4.7.1)"] [[package]] name = "flake8" @@ -693,13 +676,12 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", [[package]] name = "flake8-isort" -version = "6.0.0" +version = "6.1.0" 
description = "flake8 plugin that integrates isort ." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "flake8-isort-6.0.0.tar.gz", hash = "sha256:537f453a660d7e903f602ecfa36136b140de279df58d02eb1b6a0c84e83c528c"}, - {file = "flake8_isort-6.0.0-py3-none-any.whl", hash = "sha256:aa0cac02a62c7739e370ce6b9c31743edac904bae4b157274511fc8a19c75bbc"}, + {file = "flake8-isort-6.1.0.tar.gz", hash = "sha256:d4639343bac540194c59fb1618ac2c285b3e27609f353bef6f50904d40c1643e"}, ] [package.dependencies] @@ -876,17 +858,6 @@ files = [ six = "*" tornado = {version = "*", markers = "python_version > \"2.7\""} -[[package]] -name = "lockfile" -version = "0.12.2" -description = "Platform-independent file locking module" -optional = true -python-versions = "*" -files = [ - {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, - {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, -] - [[package]] name = "markdown-it-py" version = "3.0.0" @@ -994,85 +965,78 @@ files = [ [[package]] name = "more-itertools" -version = "9.1.0" +version = "10.1.0" description = "More routines for operating on iterables, beyond itertools" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "more-itertools-9.1.0.tar.gz", hash = "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d"}, - {file = "more_itertools-9.1.0-py3-none-any.whl", hash = "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"}, + {file = "more-itertools-10.1.0.tar.gz", hash = "sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a"}, + {file = "more_itertools-10.1.0-py3-none-any.whl", hash = "sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6"}, ] [[package]] name = "msgpack" -version = "1.0.5" +version = "1.0.7" description = "MessagePack 
serializer" optional = true -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9"}, - {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198"}, - {file = "msgpack-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a"}, - {file = "msgpack-1.0.5-cp310-cp310-win32.whl", hash = "sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea"}, - {file = "msgpack-1.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed"}, - {file = "msgpack-1.0.5-cp311-cp311-win32.whl", hash = "sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c"}, - {file = "msgpack-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2"}, - {file = "msgpack-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6"}, - {file = 
"msgpack-1.0.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c"}, - {file = "msgpack-1.0.5-cp36-cp36m-win32.whl", hash = "sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9"}, - {file = "msgpack-1.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a"}, - {file = "msgpack-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf"}, - {file = "msgpack-1.0.5-cp37-cp37m-win32.whl", hash = "sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77"}, - {file = "msgpack-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0"}, - {file = "msgpack-1.0.5-cp38-cp38-win32.whl", hash = "sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e"}, - {file = "msgpack-1.0.5-cp38-cp38-win_amd64.whl", hash = 
"sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11"}, - {file = "msgpack-1.0.5-cp39-cp39-win32.whl", hash = "sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc"}, - {file = "msgpack-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164"}, - {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"}, + {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"}, + {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"}, + {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"}, + {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"}, + {file = 
"msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"}, + {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"}, + {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"}, + {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"}, + {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"}, + {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"}, + {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"}, + {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, ] [[package]] @@ -1080,7 +1044,7 @@ name = "mypy" version = "1.6.1" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, @@ -1119,7 +1083,6 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] @@ -1236,15 +1199,49 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "numpy" +version = "1.25.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, + {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, + {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, + {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, + {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, + {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, + {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, + {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, + {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, + {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, + {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, + {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, + {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, + {file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, + {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, + {file = "numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, + {file = 
"numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, + {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, + {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, + {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, + {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, + {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, +] + [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ 
-1284,7 +1281,7 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" @@ -1316,28 +1313,31 @@ xml = ["lxml (>=4.6.3)"] [[package]] name = "pandas-stubs" -version = "2.0.2.230605" +version = "2.0.3.230814" description = "Type annotations for pandas" optional = false python-versions = ">=3.8" files = [ - {file = "pandas_stubs-2.0.2.230605-py3-none-any.whl", hash = "sha256:39106b602f3cb6dc5f728b84e1b32bde6ecf41ee34ee714c66228009609fbada"}, - {file = "pandas_stubs-2.0.2.230605.tar.gz", hash = "sha256:624c7bb06d38145a44b61be459ccd19b038e0bf20364a025ecaab78fea65e858"}, + {file = "pandas_stubs-2.0.3.230814-py3-none-any.whl", hash = "sha256:4b3dfc027d49779176b7daa031a3405f7b839bcb6e312f4b9f29fea5feec5b4f"}, + {file = "pandas_stubs-2.0.3.230814.tar.gz", hash = "sha256:1d5cc09e36e3d9f9a1ed9dceae4e03eeb26d1b898dd769996925f784365c8769"}, ] [package.dependencies] -numpy = ">=1.24.3" +numpy = [ + {version = "<=1.24.3", markers = "python_full_version <= \"3.8.0\""}, + {version = ">=1.25.0", markers = "python_version >= \"3.9\""}, +] types-pytz = ">=2022.1.1" [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -1363,18 +1363,18 @@ files = [ ] [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1403,8 +1403,10 @@ cloudpickle = ["cloudpickle"] connectorx = ["connectorx"] deltalake = ["deltalake (>=0.10.0)"] fsspec = ["fsspec"] +gevent = ["gevent"] matplotlib = ["matplotlib"] numpy = ["numpy 
(>=1.16.0)"] +openpyxl = ["openpyxl (>=3.0.0)"] pandas = ["pandas", "pyarrow (>=7.0.0)"] pyarrow = ["pyarrow (>=7.0.0)"] pydantic = ["pydantic"] @@ -1417,36 +1419,40 @@ xlsxwriter = ["xlsxwriter"] [[package]] name = "pyarrow" -version = "12.0.1" +version = "13.0.0" description = "Python library for Apache Arrow" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyarrow-12.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d288029a94a9bb5407ceebdd7110ba398a00412c5b0155ee9813a40d246c5df"}, - {file = "pyarrow-12.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345e1828efdbd9aa4d4de7d5676778aba384a2c3add896d995b23d368e60e5af"}, - {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d6009fdf8986332b2169314da482baed47ac053311c8934ac6651e614deacd6"}, - {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d3c4cbbf81e6dd23fe921bc91dc4619ea3b79bc58ef10bce0f49bdafb103daf"}, - {file = "pyarrow-12.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdacf515ec276709ac8042c7d9bd5be83b4f5f39c6c037a17a60d7ebfd92c890"}, - {file = "pyarrow-12.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:749be7fd2ff260683f9cc739cb862fb11be376de965a2a8ccbf2693b098db6c7"}, - {file = "pyarrow-12.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6895b5fb74289d055c43db3af0de6e16b07586c45763cb5e558d38b86a91e3a7"}, - {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1887bdae17ec3b4c046fcf19951e71b6a619f39fa674f9881216173566c8f718"}, - {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c9cb8eeabbadf5fcfc3d1ddea616c7ce893db2ce4dcef0ac13b099ad7ca082"}, - {file = "pyarrow-12.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ce4aebdf412bd0eeb800d8e47db854f9f9f7e2f5a0220440acf219ddfddd4f63"}, - {file = "pyarrow-12.0.1-cp37-cp37m-macosx_10_14_x86_64.whl", 
hash = "sha256:e0d8730c7f6e893f6db5d5b86eda42c0a130842d101992b581e2138e4d5663d3"}, - {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43364daec02f69fec89d2315f7fbfbeec956e0d991cbbef471681bd77875c40f"}, - {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051f9f5ccf585f12d7de836e50965b3c235542cc896959320d9776ab93f3b33d"}, - {file = "pyarrow-12.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:be2757e9275875d2a9c6e6052ac7957fbbfc7bc7370e4a036a9b893e96fedaba"}, - {file = "pyarrow-12.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:cf812306d66f40f69e684300f7af5111c11f6e0d89d6b733e05a3de44961529d"}, - {file = "pyarrow-12.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:459a1c0ed2d68671188b2118c63bac91eaef6fc150c77ddd8a583e3c795737bf"}, - {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e705e33eaf666bbe508a16fd5ba27ca061e177916b7a317ba5a51bee43384c"}, - {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9120c3eb2b1f6f516a3b7a9714ed860882d9ef98c4b17edcdc91d95b7528db60"}, - {file = "pyarrow-12.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c780f4dc40460015d80fcd6a6140de80b615349ed68ef9adb653fe351778c9b3"}, - {file = "pyarrow-12.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a3c63124fc26bf5f95f508f5d04e1ece8cc23a8b0af2a1e6ab2b1ec3fdc91b24"}, - {file = "pyarrow-12.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b13329f79fa4472324f8d32dc1b1216616d09bd1e77cfb13104dec5463632c36"}, - {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb656150d3d12ec1396f6dde542db1675a95c0cc8366d507347b0beed96e87ca"}, - {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251e38470da97a5b2e00de5c6a049149f7b2bd62f12fa5dbb9ac674119ba71a"}, - {file = "pyarrow-12.0.1-cp39-cp39-win_amd64.whl", 
hash = "sha256:3de26da901216149ce086920547dfff5cd22818c9eab67ebc41e863a5883bac7"}, - {file = "pyarrow-12.0.1.tar.gz", hash = "sha256:cce317fc96e5b71107bf1f9f184d5e54e2bd14bbf3f9a3d62819961f0af86fec"}, + {file = "pyarrow-13.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:1afcc2c33f31f6fb25c92d50a86b7a9f076d38acbcb6f9e74349636109550148"}, + {file = "pyarrow-13.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:70fa38cdc66b2fc1349a082987f2b499d51d072faaa6b600f71931150de2e0e3"}, + {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd57b13a6466822498238877892a9b287b0a58c2e81e4bdb0b596dbb151cbb73"}, + {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ce69f7bf01de2e2764e14df45b8404fc6f1a5ed9871e8e08a12169f87b7a26"}, + {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:588f0d2da6cf1b1680974d63be09a6530fd1bd825dc87f76e162404779a157dc"}, + {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6241afd72b628787b4abea39e238e3ff9f34165273fad306c7acf780dd850956"}, + {file = "pyarrow-13.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:fda7857e35993673fcda603c07d43889fca60a5b254052a462653f8656c64f44"}, + {file = "pyarrow-13.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:aac0ae0146a9bfa5e12d87dda89d9ef7c57a96210b899459fc2f785303dcbb67"}, + {file = "pyarrow-13.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7759994217c86c161c6a8060509cfdf782b952163569606bb373828afdd82e8"}, + {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868a073fd0ff6468ae7d869b5fc1f54de5c4255b37f44fb890385eb68b68f95d"}, + {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51be67e29f3cfcde263a113c28e96aa04362ed8229cb7c6e5f5c719003659d33"}, + {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:d1b4e7176443d12610874bb84d0060bf080f000ea9ed7c84b2801df851320295"}, + {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:69b6f9a089d116a82c3ed819eea8fe67dae6105f0d81eaf0fdd5e60d0c6e0944"}, + {file = "pyarrow-13.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ab1268db81aeb241200e321e220e7cd769762f386f92f61b898352dd27e402ce"}, + {file = "pyarrow-13.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ee7490f0f3f16a6c38f8c680949551053c8194e68de5046e6c288e396dccee80"}, + {file = "pyarrow-13.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3ad79455c197a36eefbd90ad4aa832bece7f830a64396c15c61a0985e337287"}, + {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68fcd2dc1b7d9310b29a15949cdd0cb9bc34b6de767aff979ebf546020bf0ba0"}, + {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc6fd330fd574c51d10638e63c0d00ab456498fc804c9d01f2a61b9264f2c5b2"}, + {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e66442e084979a97bb66939e18f7b8709e4ac5f887e636aba29486ffbf373763"}, + {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:0f6eff839a9e40e9c5610d3ff8c5bdd2f10303408312caf4c8003285d0b49565"}, + {file = "pyarrow-13.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b30a27f1cddf5c6efcb67e598d7823a1e253d743d92ac32ec1eb4b6a1417867"}, + {file = "pyarrow-13.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:09552dad5cf3de2dc0aba1c7c4b470754c69bd821f5faafc3d774bedc3b04bb7"}, + {file = "pyarrow-13.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3896ae6c205d73ad192d2fc1489cd0edfab9f12867c85b4c277af4d37383c18c"}, + {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6647444b21cb5e68b593b970b2a9a07748dd74ea457c7dadaa15fd469c48ada1"}, + {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:47663efc9c395e31d09c6aacfa860f4473815ad6804311c5433f7085415d62a7"}, + {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b9ba6b6d34bd2563345488cf444510588ea42ad5613df3b3509f48eb80250afd"}, + {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d00d374a5625beeb448a7fa23060df79adb596074beb3ddc1838adb647b6ef09"}, + {file = "pyarrow-13.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c51afd87c35c8331b56f796eff954b9c7f8d4b7fef5903daf4e05fcf017d23a8"}, + {file = "pyarrow-13.0.0.tar.gz", hash = "sha256:83333726e83ed44b0ac94d8d7a21bbdee4a05029c3b1e8db58a863eec8fd8a33"}, ] [package.dependencies] @@ -1613,13 +1619,13 @@ files = [ [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -1714,13 +1720,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = 
"sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -1805,13 +1811,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.4.2" +version = "13.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"}, - {file = "rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"}, + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, ] [package.dependencies] @@ -1834,7 +1840,7 @@ files = [ ] [package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} [package.extras] docs = ["mercurial (>5.7)", "ryd"] @@ -1842,10 +1848,10 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel-yaml-clib" -version = "0.2.7" +version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" optional = true -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = 
"sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, @@ -1887,19 +1893,19 @@ files = [ [[package]] name = "setuptools" -version = "68.0.0" +version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", 
"sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1914,13 +1920,13 @@ files = [ [[package]] name = "smmap" -version = "5.0.0" +version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] [[package]] @@ -1936,13 +1942,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] @@ -2049,18 +2055,18 @@ Sphinx = "*" [[package]] name = "sphinx-rtd-theme" -version = "1.2.2" +version = "1.3.0" description = "Read the Docs theme for Sphinx" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "sphinx_rtd_theme-1.2.2-py2.py3-none-any.whl", hash = "sha256:6a7e7d8af34eb8fc57d52a09c6b6b9c46ff44aea5951bc831eeb9245378f3689"}, - {file = "sphinx_rtd_theme-1.2.2.tar.gz", hash = "sha256:01c5c5a72e2d025bd23d1f06c59a4831b06e6ce6c01fdd5ebfe9986c0a880fc7"}, + {file = "sphinx_rtd_theme-1.3.0-py2.py3-none-any.whl", hash = "sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0"}, + {file = "sphinx_rtd_theme-1.3.0.tar.gz", hash = "sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931"}, ] [package.dependencies] docutils = "<0.19" -sphinx = ">=1.6,<7" +sphinx = ">=1.6,<8" sphinxcontrib-jquery = ">=4,<5" [package.extras] @@ -2088,25 +2094,25 @@ testing = ["bs4", "coverage", "pygments", "pytest (>=7.1,<8)", "pytest-cov", "py [[package]] name = "sphinx-toolbox" -version = "3.4.0" +version = "3.5.0" description = "Box of handy tools for Sphinx 🧰 📔" optional = true python-versions = ">=3.7" files = [ - {file = "sphinx_toolbox-3.4.0-py3-none-any.whl", hash = "sha256:cdf70facee515a2d9406d568a253fa3e89f930fde23c4e8095ba0c675f7c0a48"}, - {file = "sphinx_toolbox-3.4.0.tar.gz", hash = 
"sha256:e1cf2a3dea5ce80e175a6a9cee8b5b2792240ecf6c28993d87a63b6fcf606293"}, + {file = "sphinx_toolbox-3.5.0-py3-none-any.whl", hash = "sha256:20dfd3566717db6f2da7a400a54dc4b946f064fb31250fa44802d54cfb9b8a03"}, + {file = "sphinx_toolbox-3.5.0.tar.gz", hash = "sha256:e5b5a7153f1997572d71a06aaf6cec225483492ec2c60097a84f15aad6df18b7"}, ] [package.dependencies] apeye = ">=0.4.0" autodocsumm = ">=0.2.0" beautifulsoup4 = ">=4.9.1" -cachecontrol = {version = ">=0.12.6", extras = ["filecache"]} +cachecontrol = {version = ">=0.13.0", extras = ["filecache"]} dict2css = ">=0.2.3" -docutils = ">=0.16,<0.19" +docutils = ">=0.16" domdf-python-tools = ">=2.9.0" +filelock = ">=3.8.0" html5lib = ">=1.1" -lockfile = ">=0.12.2" "ruamel.yaml" = ">=0.16.12" sphinx = ">=3.2.0" sphinx-autodoc-typehints = ">=1.11.1" @@ -2275,33 +2281,33 @@ files = [ [[package]] name = "tornado" -version = "6.3.2" +version = "6.3.3" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = true python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c367ab6c0393d71171123ca5515c61ff62fe09024fa6bf299cd1339dc9456829"}, - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b46a6ab20f5c7c1cb949c72c1994a4585d2eaa0be4853f50a03b5031e964fc7c"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2de14066c4a38b4ecbbcd55c5cc4b5340eb04f1c5e81da7451ef555859c833f"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05615096845cf50a895026f749195bf0b10b8909f9be672f50b0fe69cba368e4"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b17b1cf5f8354efa3d37c6e28fdfd9c1c1e5122f2cb56dac121ac61baa47cbe"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:29e71c847a35f6e10ca3b5c2990a52ce38b233019d8e858b755ea6ce4dcdd19d"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:834ae7540ad3a83199a8da8f9f2d383e3c3d5130a328889e4cc991acc81e87a0"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6a0848f1aea0d196a7c4f6772197cbe2abc4266f836b0aac76947872cd29b411"}, - {file = "tornado-6.3.2-cp38-abi3-win32.whl", hash = "sha256:7efcbcc30b7c654eb6a8c9c9da787a851c18f8ccd4a5a3a95b05c7accfa068d2"}, - {file = "tornado-6.3.2-cp38-abi3-win_amd64.whl", hash = "sha256:0c325e66c8123c606eea33084976c832aa4e766b7dff8aedd7587ea44a604cdf"}, - {file = "tornado-6.3.2.tar.gz", hash = "sha256:4b927c4f19b71e627b13f3db2324e4ae660527143f9e1f2e2fb404f3a187e2ba"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, + {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, + {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, + {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, ] [[package]] name = "types-pytz" -version = "2023.3.0.0" +version = "2023.3.1.1" description = "Typing stubs for pytz" optional = false python-versions = "*" files = [ - {file = "types-pytz-2023.3.0.0.tar.gz", hash = "sha256:ecdc70d543aaf3616a7e48631543a884f74205f284cefd6649ddf44c6a820aac"}, - {file = "types_pytz-2023.3.0.0-py3-none-any.whl", hash = "sha256:4fc2a7fbbc315f0b6630e0b899fd6c743705abe1094d007b0e612d10da15e0f3"}, + {file = "types-pytz-2023.3.1.1.tar.gz", hash = 
"sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"}, + {file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"}, ] [[package]] @@ -2320,7 +2326,7 @@ name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, diff --git a/src/patito/polars.py b/src/patito/polars.py index 0727eb2..2bf210a 100644 --- a/src/patito/polars.py +++ b/src/patito/polars.py @@ -68,14 +68,8 @@ def _construct_lazyframe_model_class( def collect( self, - type_coercion: bool = True, - predicate_pushdown: bool = True, - projection_pushdown: bool = True, - simplify_expression: bool = True, - no_optimization: bool = False, - slice_pushdown: bool = True, - common_subplan_elimination: bool = True, - streaming: bool = False, + *args, + **kwargs, ) -> "DataFrame[ModelType]": # noqa: DAR101, DAR201 """ Collect into a DataFrame. @@ -83,16 +77,7 @@ def collect( See documentation of polars.DataFrame.collect for full description of parameters. 
""" - df = super().collect( - type_coercion=type_coercion, - predicate_pushdown=predicate_pushdown, - projection_pushdown=projection_pushdown, - simplify_expression=simplify_expression, - no_optimization=no_optimization, - slice_pushdown=slice_pushdown, - common_subplan_elimination=common_subplan_elimination, - streaming=streaming, - ) + df = super().collect(*args, **kwargs) if getattr(self, "model", False): cls = DataFrame._construct_dataframe_model_class(model=self.model) else: From 87d57bdcbf1ae8158d080e1c6ab3ceb828366666 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Thu, 26 Oct 2023 12:48:19 -0400 Subject: [PATCH 06/29] wip: restructure model reconstitution routines, all tests passing --- src/patito/pydantic.py | 80 +++++++++++++++++++++++----------------- tests/test_model.py | 51 ++++++++++++++++++------- tests/test_validators.py | 6 +-- 3 files changed, 85 insertions(+), 52 deletions(-) diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 9376c14..8c2d2f3 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -19,6 +19,7 @@ Literal, get_args, Sequence, + Tuple, ) import polars as pl @@ -77,7 +78,7 @@ class ModelMetaclass(PydanticModelMetaclass): Responsible for setting any relevant model-dependent class properties. """ - def __init__(cls, name: str, bases: tuple, clsdict: dict) -> None: + def __init__(cls, name: str, bases: tuple, clsdict: dict, **kwargs) -> None: """ Construct new patito model. 
@@ -224,6 +225,7 @@ def _valid_dtypes( # noqa: C901 elif "type" not in props: if 'anyOf' in props: res = [cls._valid_dtypes(column, sub_props) for sub_props in props['anyOf']] + res = [x for x in res if x is not None] return list(itertools.chain.from_iterable(res)) elif 'const' in props: return cls._valid_dtypes(column, {'type': PYTHON_TO_PYDANTIC_TYPES.get(type(props['const']))}) @@ -255,6 +257,10 @@ def _valid_dtypes( # noqa: C901 # TODO: Find out why this branch is not being hit elif string_format == "date-time": # pragma: no cover return [pl.Datetime] + elif string_format == "duration": + return [pl.Duration] + elif string_format.startswith("uuid"): + return [pl.Object] else: return None # pragma: no cover elif props["type"] == "null": @@ -674,7 +680,7 @@ def _from_polars( if validate: return cls(**dataframe.to_dicts()[0]) else: - return cls.construct(**dataframe.to_dicts()[0]) + return cls.model_construct(**dataframe.to_dicts()[0]) @classmethod def validate( @@ -758,7 +764,14 @@ def example_value( # noqa: C901 """ field_data = cls._schema_properties() properties = field_data[field] - field_type = properties["type"] + if "type" in properties: + field_type = properties["type"] + elif "anyOf" in properties: + allowable = [x['type'] for x in properties['anyOf'] if 'type' in x] + if 'null' in allowable: + field_type = 'null' + else: + field_type = allowable[0] if "const" in properties: # The default value is the only valid value, provided as const return properties["const"] @@ -769,6 +782,9 @@ def example_value( # noqa: C901 elif not properties["required"]: return None + + elif field_type == 'null': + return None elif "enum" in properties: return properties["enum"][0] @@ -1104,22 +1120,9 @@ def join( (cls, {"outer"}), (other, {"left", "outer", "asof"}), ): - # TODO PYDANTIC V2, not sure how to implement this: - # old_field.required no longer exists, maybe this needs to be - # completely rewritten. 
See fields at - # https://docs.pydantic.dev/latest/api/fields/#pydantic.fields.FieldInfo - for field_name, field in model.__fields__.items(): - field_type = field.type_ - field_default = field.default - if how in nullable_methods and type(None) not in get_args(field.type_): - # This originally non-nullable field has become nullable - field_type = Optional[field_type] - elif field.required and field_default is None: - # We need to replace Pydantic's None default value with ... in order - # to make it clear that the field is still non-nullable and - # required. - field_default = ... - kwargs[field_name] = (field_type, field_default) + for field_name, field in model.model_fields.items(): + make_nullable = how in nullable_methods and type(None) not in get_args(field.annotation) + kwargs[field_name] = cls._derive_field(field, make_nullable=make_nullable) return create_model( f"{cls.__name__}{how.capitalize()}Join{other.__name__}", @@ -1406,31 +1409,40 @@ def _derive_model( """ new_fields = {} for new_field_name, field_definition in field_mapping.items(): - # TODO PYDANTIC V2, not sure how to implement this: - # old_field.required no longer exists, maybe this needs to be - # completely rewritten. See fields at - # https://docs.pydantic.dev/latest/api/fields/#pydantic.fields.FieldInfo if isinstance(field_definition, str): # A single string, interpreted as the name of a field on the existing # model. - old_field = cls.__fields__[field_definition] - field_type = old_field.type_ - field_default = old_field.default - if old_field.required and field_default is None: - # The default None value needs to be replaced with ... in order to - # make the field required in the new model. - field_default = ... - new_fields[new_field_name] = (field_type, field_default) + old_field = cls.model_fields[field_definition] + new_fields[new_field_name] = cls._derive_field(old_field) else: # We have been given a (field_type, field_default) tuple defining the # new field directly. 
- new_fields[new_field_name] = field_definition + field_type = field_definition[0] + if field_definition[1] is None and type(None) not in get_args(field_type): + field_type = Optional[field_type] + new_fields[new_field_name] = (field_type, field_definition[1]) return create_model( # type: ignore __model_name=model_name, - __validators__={"__validators__": cls.__validators__}, __base__=Model, **new_fields, ) + + @staticmethod + def _derive_field(field: FieldInfo, make_nullable: bool = False) -> Tuple[Type, FieldInfo]: + field_type = field.annotation + default = field.default + extra_attrs = {x: getattr(field, x) for x in field._attributes_set if x in field.__slots__ and x not in ['annotation', 'default']} + if make_nullable: + # This originally non-nullable field has become nullable + field_type = Optional[field_type] + elif field.is_required() and default is None: + # We need to replace Pydantic's None default value with ... in order + # to make it clear that the field is still non-nullable and + # required. + default = ... 
+ field_new = Field(default=default, **extra_attrs) + field_new.metadata = field.metadata + return field_type, field_new PT_INFO = Literal["constraints", "derived_from", "dtype", "unique"] @@ -1451,10 +1463,12 @@ def __init__( **kwargs, ): super().__init__(**kwargs) + self.constraints = constraints self.derived_from = derived_from self.dtype = dtype self.unique = unique + self._attributes_set.update(**{k: getattr(self, k) for k in get_args(PT_INFO) if getattr(self, k) is not None}) def Field( diff --git a/tests/test_model.py b/tests/test_model.py index 78a3fcd..eefbfae 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -29,7 +29,7 @@ class MyModel(pt.Model): date_value: date datetime_value: datetime - assert MyModel.example().dict() == { + assert MyModel.example().model_dump() == { "int_value": -1, "float_value": -0.5, "str_value": "dummy_string", @@ -45,7 +45,7 @@ class MyModel(pt.Model): bool_value=True, default_value="override", optional_value=1, - ).dict() == { + ).model_dump() == { "int_value": -1, "float_value": -0.5, "str_value": "dummy_string", @@ -60,7 +60,7 @@ class MyModel(pt.Model): # For now, valid regex data is not implemented class RegexModel(pt.Model): - regex_column: str = pt.Field(regex=r"[0-9a-f]") + regex_column: str = pt.Field(pattern=r"[0-9a-f]") with pytest.raises( NotImplementedError, @@ -274,21 +274,18 @@ def test_model_joins(): class Left(pt.Model): left: int = pt.Field(gt=20) - opt_left: Optional[int] + opt_left: Optional[int] = None class Right(pt.Model): right: int = pt.Field(gt=20) - opt_right: Optional[int] + opt_right: Optional[int] = None def test_model_validator(model: Type[pt.Model]) -> None: """Test if all field validators have been included correctly.""" - with pytest.raises( - ValidationError, - match=re.compile( - r".*limit_value=20.*\n.*\n.*limit_value=20.*", re.MULTILINE - ), - ): + with pytest.raises(ValidationError) as e: model(left=1, opt_left=1, right=1, opt_right=1) + pattern = re.compile(r'Input should be 
greater than 20') + assert len(pattern.findall(str(e.value))) == 2 # An inner join should keep nullability information InnerJoinModel = Left.join(Right, how="inner") @@ -326,13 +323,13 @@ class MyModel(pt.Model): MySubModel = MyModel.select("b") assert MySubModel.columns == ["b"] MySubModel(b=11) - with pytest.raises(ValidationError, match="limit_value=10"): + with pytest.raises(ValidationError, match="Input should be greater than 10"): MySubModel(b=1) MyTotalModel = MyModel.select(["a", "b"]) assert sorted(MyTotalModel.columns) == ["a", "b"] MyTotalModel(a=1, b=11) - with pytest.raises(ValidationError, match="limit_value=10"): + with pytest.raises(ValidationError, match="Input should be greater than 10"): MyTotalModel(a=1, b=1) assert MyTotalModel.nullable_columns == {"a"} @@ -391,7 +388,7 @@ class MyModel(pt.Model): ExpandedModel = MyModel.with_fields( b=(int, ...), - c=(int, None), + c=(int, None), # TODO should this be nullable if not specified as optional? d=(int, pt.Field(gt=10)), e=(Optional[int], None), ) @@ -427,3 +424,29 @@ class InvalidEnumModel(pt.Model): assert EnumModel.sql_types["column"].startswith("enum__") with pytest.raises(TypeError, match=r".*Encountered types: \['int', 'str'\]\."): InvalidEnumModel.sql_types + +def test_pt_fields(): + + class Model(pt.Model): + a: int + b: int = pt.Field(constraints=[(pl.col("b") < 10)]) + c: int = pt.Field(derived_from=pl.col("a") + pl.col("b")) + d: int = pt.Field(dtype=pl.UInt8) + e: int = pt.Field(unique=True) + + schema = Model.model_json_schema() # no serialization issues + props = Model._schema_properties() # extra fields are stored in modified schema_properties + assert 'constraints' in props['b'] + assert 'derived_from' in props['c'] + assert 'dtype' in props['d'] + assert 'unique' in props['e'] + + fields = Model.model_fields # attributes are properly set and catalogued on the `FieldInfo` objects + assert 'constraints' in fields['b']._attributes_set + assert fields['b'].constraints is not None + 
assert 'derived_from' in fields['c']._attributes_set + assert fields['c'].derived_from is not None + assert 'dtype' in fields['d']._attributes_set + assert fields['d'].dtype is not None + assert 'unique' in fields['e']._attributes_set + assert fields['e'].unique is not None \ No newline at end of file diff --git a/tests/test_validators.py b/tests/test_validators.py index 0e68e59..e96ecf3 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -600,8 +600,4 @@ class ListModel(pt.Model): ]: # print(old, new) with pytest.raises(DataFrameValidationError): - ListModel.validate(valid_df.with_columns(pl.col(old).alias(new))) - - -if __name__ == "__main__": - test_custom_constraint_validation() \ No newline at end of file + ListModel.validate(valid_df.with_columns(pl.col(old).alias(new))) \ No newline at end of file From ffe343d9781d5f6e884813a655ab7cc6960319dc Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Fri, 27 Oct 2023 09:40:46 -0400 Subject: [PATCH 07/29] chore: lockfile and formatting chore: cleanup init chore: cleanup init chore: more misc cleanup cleanup json_schema_extras --- poetry.lock | 38 ++++++------- src/patito/__init__.py | 4 +- src/patito/exceptions.py | 115 +++++++++++++++++++++++---------------- src/patito/pydantic.py | 88 +++++++++++++++++++----------- src/patito/validators.py | 1 - tests/test_dummy_data.py | 10 ++-- tests/test_model.py | 42 +++++++------- tests/test_polars.py | 6 +- tests/test_validators.py | 15 +++-- 9 files changed, 182 insertions(+), 137 deletions(-) diff --git a/poetry.lock b/poetry.lock index e5e4b77..23d76e5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -452,18 +452,18 @@ toml = ["tomli"] [[package]] name = "cssutils" -version = "2.7.1" +version = "2.9.0" description = "A CSS Cascading Style Sheets library for Python" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "cssutils-2.7.1-py3-none-any.whl", hash = 
"sha256:1e92e0d9dab2ec8af9f38d715393964ba533dc3beacab9b072511dfc241db775"}, - {file = "cssutils-2.7.1.tar.gz", hash = "sha256:340ecfd9835d21df8f98500f0dfcea0aee41cb4e19ecbc2cf94f0a6d36d7cb6c"}, + {file = "cssutils-2.9.0-py3-none-any.whl", hash = "sha256:f8b013169e281c0c6083207366c5005f5dd4549055f7aba840384fb06a78745c"}, + {file = "cssutils-2.9.0.tar.gz", hash = "sha256:89477b3d17d790e97b9fb4def708767061055795aae6f7c82ae32e967c9be4cd"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["cssselect", "importlib-resources", "jaraco.test (>=5.1)", "lxml", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["cssselect", "importlib-resources", "jaraco.test (>=5.1)", "lxml", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [[package]] name = "dict2css" @@ -1911,7 +1911,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -optional = false +optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -2412,30 +2412,26 @@ files = [ [[package]] name = "xdoctest" -version = "1.1.1" +version = "1.1.2" description = "A rewrite of the builtin doctest module" optional = false python-versions = ">=3.6" files = [ - {file = "xdoctest-1.1.1-py3-none-any.whl", hash = "sha256:d59d4ed91cb92e4430ef0ad1b134a2bef02adff7d2fb9c9f057547bee44081a2"}, - {file = 
"xdoctest-1.1.1.tar.gz", hash = "sha256:2eac8131bdcdf2781b4e5a62d6de87f044b730cc8db8af142a51bb29c245e779"}, + {file = "xdoctest-1.1.2-py3-none-any.whl", hash = "sha256:ebe133222534f09597cbe461f97cc5f95ad7b36e5d31f3437caffb9baaddbddb"}, + {file = "xdoctest-1.1.2.tar.gz", hash = "sha256:267d3d4e362547fa917d3deabaf6888232bbf43c8d30298faeb957dbfa7e0ba3"}, ] -[package.dependencies] -six = "*" - [package.extras] -all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "pytest", "pytest", "pytest", "pytest-cov", "six", "tomli", "typing"] -all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "six (==1.11.0)", "tomli (==0.2.0)", "typing (==3.7.4)"] +all = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest 
(>=6.2.5)", "pytest-cov (>=3.0.0)", "tomli (>=0.2.0)", "typing (>=3.7.4)"] +all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "tomli (==0.2.0)", "typing (==3.7.4)"] colors = ["Pygments", "Pygments", "colorama"] -jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] -optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] -optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] -runtime-strict = ["six (==1.11.0)"] -tests = ["codecov", "pytest", "pytest", "pytest", "pytest-cov", "typing"] +jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", 
"debugpy", "debugpy", "ipykernel", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "nbconvert"] +optional = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "tomli (>=0.2.0)"] +optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] +tests = ["pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "typing (>=3.7.4)"] tests-binary = ["cmake", "cmake", "ninja", "ninja", "pybind11", "pybind11", "scikit-build", "scikit-build"] tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"] -tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"] +tests-strict = ["pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"] [[package]] name = 
"zipp" diff --git a/src/patito/__init__.py b/src/patito/__init__.py index a71b314..1ee3f5c 100644 --- a/src/patito/__init__.py +++ b/src/patito/__init__.py @@ -2,21 +2,21 @@ from polars import Expr, Series, col from patito import exceptions, sql -# from patito.exceptions import ValidationError from patito.polars import DataFrame, LazyFrame from patito.pydantic import Field, Model +from patito.exceptions import DataFrameValidationError _CACHING_AVAILABLE = False _DUCKDB_AVAILABLE = False field = col("_") __all__ = [ "DataFrame", + "DataFrameValidationError", "Expr", "Field", "LazyFrame", "Model", "Series", - # "ValidationError", "_CACHING_AVAILABLE", "_DUCKDB_AVAILABLE", "col", diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index 105af29..7240c55 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -1,5 +1,19 @@ import json -from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union, Callable, TypedDict, Iterable +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generator, + List, + Optional, + Sequence, + Tuple, + Type, + Union, + Callable, + TypedDict, + Iterable, +) if TYPE_CHECKING: from pydantic import BaseModel @@ -16,10 +30,12 @@ class ErrorDict(_ErrorDictRequired, total=False): Loc = Tuple[Union[int, str], ...] ReprArgs = Sequence[Tuple[Optional[str], Any]] - RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] + RichReprResult = Iterable[ + Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]] + ] -__all__ = 'ErrorWrapper', 'ValidationError' +__all__ = "ErrorWrapper", "ValidationError" class Representation: @@ -32,7 +48,7 @@ class Representation: __slots__: Tuple[str, ...] = tuple() - def __repr_args__(self) -> 'ReprArgs': + def __repr_args__(self) -> "ReprArgs": """ Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. 
@@ -50,30 +66,34 @@ def __repr_name__(self) -> str: return self.__class__.__name__ def __repr_str__(self, join_str: str) -> str: - return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__()) + return join_str.join( + repr(v) if a is None else f"{a}={v!r}" for a, v in self.__repr_args__() + ) - def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]: + def __pretty__( + self, fmt: Callable[[Any], Any], **kwargs: Any + ) -> Generator[Any, None, None]: """ Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects """ - yield self.__repr_name__() + '(' + yield self.__repr_name__() + "(" yield 1 for name, value in self.__repr_args__(): if name is not None: - yield name + '=' + yield name + "=" yield fmt(value) - yield ',' + yield "," yield 0 yield -1 - yield ')' + yield ")" def __str__(self) -> str: - return self.__repr_str__(' ') + return self.__repr_str__(" ") def __repr__(self) -> str: return f'{self.__repr_name__()}({self.__repr_str__(", ")})' - def __rich_repr__(self) -> 'RichReprResult': + def __rich_repr__(self) -> "RichReprResult": """Get fields for Rich library""" for name, field_repr in self.__repr_args__(): if name is None: @@ -81,21 +101,22 @@ def __rich_repr__(self) -> 'RichReprResult': else: yield name, field_repr + class ErrorWrapper(Representation): - __slots__ = 'exc', '_loc' + __slots__ = "exc", "_loc" - def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: + def __init__(self, exc: Exception, loc: Union[str, "Loc"]) -> None: self.exc = exc self._loc = loc - def loc_tuple(self) -> 'Loc': + def loc_tuple(self) -> "Loc": if isinstance(self._loc, tuple): return self._loc else: return (self._loc,) - def __repr_args__(self) -> 'ReprArgs': - return [('exc', self.exc), ('loc', self.loc_tuple())] + def __repr_args__(self) -> "ReprArgs": + return [("exc", self.exc), ("loc", self.loc_tuple())] # ErrorList is something like 
Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper] @@ -104,56 +125,55 @@ def __repr_args__(self) -> 'ReprArgs': class DataFrameValidationError(Representation, ValueError): - __slots__ = 'raw_errors', 'model', '_error_cache' + __slots__ = "raw_errors", "model", "_error_cache" - def __init__(self, errors: Sequence[ErrorList], model: 'BaseModel') -> None: + def __init__(self, errors: Sequence[ErrorList], model: "BaseModel") -> None: self.raw_errors = errors self.model = model - self._error_cache: Optional[List['ErrorDict']] = None + self._error_cache: Optional[List["ErrorDict"]] = None - def errors(self) -> List['ErrorDict']: + def errors(self) -> List["ErrorDict"]: if self._error_cache is None: self._error_cache = list(flatten_errors(self.raw_errors)) return self._error_cache - # def json(self, *, indent: Union[None, int, str] = 2) -> str: - # return json.dumps(self.errors(), indent=indent, default=pydantic_encoder) - def __str__(self) -> str: errors = self.errors() no_errors = len(errors) return ( f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n' - f'{display_errors(errors)}' + f"{display_errors(errors)}" ) - def __repr_args__(self) -> 'ReprArgs': - return [('model', self.model.__name__), ('errors', self.errors())] + def __repr_args__(self) -> "ReprArgs": + return [("model", self.model.__name__), ("errors", self.errors())] -def display_errors(errors: List['ErrorDict']) -> str: - return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors) +def display_errors(errors: List["ErrorDict"]) -> str: + return "\n".join( + f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' + for e in errors + ) -def _display_error_loc(error: 'ErrorDict') -> str: - return ' -> '.join(str(e) for e in error['loc']) +def _display_error_loc(error: "ErrorDict") -> str: + return " -> ".join(str(e) for e in error["loc"]) -def _display_error_type_and_ctx(error: 'ErrorDict') 
-> str: - t = 'type=' + error['type'] - ctx = error.get('ctx') +def _display_error_type_and_ctx(error: "ErrorDict") -> str: + t = "type=" + error["type"] + ctx = error.get("ctx") if ctx: - return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) + return t + "".join(f"; {k}={v}" for k, v in ctx.items()) else: return t def flatten_errors( - errors: Sequence[Any], loc: Optional['Loc'] = None -) -> Generator['ErrorDict', None, None]: + errors: Sequence[Any], loc: Optional["Loc"] = None +) -> Generator["ErrorDict", None, None]: for error in errors: if isinstance(error, ErrorWrapper): - if loc: error_loc = loc + error.loc_tuple() else: @@ -166,22 +186,22 @@ def flatten_errors( elif isinstance(error, list): yield from flatten_errors(error, loc=loc) else: - raise RuntimeError(f'Unknown error object: {error}') + raise RuntimeError(f"Unknown error object: {error}") -def error_dict(exc: Exception, loc: 'Loc') -> 'ErrorDict': +def error_dict(exc: Exception, loc: "Loc") -> "ErrorDict": type_ = get_exc_type(exc.__class__) - msg_template = getattr(exc, 'msg_template', None) + msg_template = getattr(exc, "msg_template", None) ctx = exc.__dict__ if msg_template: msg = msg_template.format(**ctx) else: msg = str(exc) - d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_} + d: "ErrorDict" = {"loc": loc, "msg": msg, "type": type_} if ctx: - d['ctx'] = ctx + d["ctx"] = ctx return d @@ -201,17 +221,18 @@ def get_exc_type(cls: Type[Exception]) -> str: def _get_exc_type(cls: Type[Exception]) -> str: if issubclass(cls, AssertionError): - return 'assertion_error' + return "assertion_error" - base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error' + base_name = "type_error" if issubclass(cls, TypeError) else "value_error" if cls in (TypeError, ValueError): # just TypeError or ValueError, no extra code return base_name # if it's not a TypeError or ValueError, we just take the lowercase of the exception name # no chaining or snake case logic, use "code" for more complex 
error types. - code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower() - return base_name + '.' + code + code = getattr(cls, "code", None) or cls.__name__.replace("Error", "").lower() + return base_name + "." + code + class WrongColumnsError(TypeError): """Validation exception for column name mismatches.""" diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 8c2d2f3..1434607 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -16,7 +16,7 @@ TypeVar, Union, cast, - Literal, + Literal, get_args, Sequence, Tuple, @@ -26,7 +26,9 @@ from polars.datatypes import PolarsDataType from pydantic import fields from pydantic import ConfigDict, BaseModel, create_model # noqa: F401 -from pydantic._internal._model_construction import ModelMetaclass as PydanticModelMetaclass +from pydantic._internal._model_construction import ( + ModelMetaclass as PydanticModelMetaclass, +) from patito.polars import DataFrame, LazyFrame from patito.validators import validate @@ -223,12 +225,16 @@ def _valid_dtypes( # noqa: C901 elif "enum" in props and props["type"] == "string": return [pl.Categorical, pl.Utf8] elif "type" not in props: - if 'anyOf' in props: - res = [cls._valid_dtypes(column, sub_props) for sub_props in props['anyOf']] + if "anyOf" in props: + res = [ + cls._valid_dtypes(column, sub_props) for sub_props in props["anyOf"] + ] res = [x for x in res if x is not None] return list(itertools.chain.from_iterable(res)) - elif 'const' in props: - return cls._valid_dtypes(column, {'type': PYTHON_TO_PYDANTIC_TYPES.get(type(props['const']))}) + elif "const" in props: + return cls._valid_dtypes( + column, {"type": PYTHON_TO_PYDANTIC_TYPES.get(type(props["const"]))} + ) return None elif props["type"] == "integer": return [ @@ -483,7 +489,9 @@ def non_nullable_columns( # type: ignore >>> sorted(MyModel.non_nullable_columns) ['another_non_nullable_field', 'non_nullable_field'] """ - return set(k for k in cls.valid_dtypes.keys() if pl.Null not 
in cls.valid_dtypes[k]) + return set( + k for k in cls.valid_dtypes.keys() if pl.Null not in cls.valid_dtypes[k] + ) @property def nullable_columns( # type: ignore @@ -767,9 +775,9 @@ def example_value( # noqa: C901 if "type" in properties: field_type = properties["type"] elif "anyOf" in properties: - allowable = [x['type'] for x in properties['anyOf'] if 'type' in x] - if 'null' in allowable: - field_type = 'null' + allowable = [x["type"] for x in properties["anyOf"] if "type" in x] + if "null" in allowable: + field_type = "null" else: field_type = allowable[0] if "const" in properties: @@ -782,8 +790,8 @@ def example_value( # noqa: C901 elif not properties["required"]: return None - - elif field_type == 'null': + + elif field_type == "null": return None elif "enum" in properties: @@ -1121,8 +1129,12 @@ def join( (other, {"left", "outer", "asof"}), ): for field_name, field in model.model_fields.items(): - make_nullable = how in nullable_methods and type(None) not in get_args(field.annotation) - kwargs[field_name] = cls._derive_field(field, make_nullable=make_nullable) + make_nullable = how in nullable_methods and type(None) not in get_args( + field.annotation + ) + kwargs[field_name] = cls._derive_field( + field, make_nullable=make_nullable + ) return create_model( f"{cls.__name__}{how.capitalize()}Join{other.__name__}", @@ -1378,8 +1390,10 @@ def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: else: fields[field_name] = field_info fields[field_name]["required"] = field_name in required - if 'const' in field_info and 'type' not in field_info: - fields[field_name]['type'] = PYTHON_TO_PYDANTIC_TYPES[type(field_info['const'])] + if "const" in field_info and "type" not in field_info: + fields[field_name]["type"] = PYTHON_TO_PYDANTIC_TYPES[ + type(field_info["const"]) + ] for f in get_args(PT_INFO): v = getattr(cls.model_fields[field_name], f, None) if v is not None: @@ -1418,7 +1432,9 @@ def _derive_model( # We have been given a (field_type, field_default) tuple 
defining the # new field directly. field_type = field_definition[0] - if field_definition[1] is None and type(None) not in get_args(field_type): + if field_definition[1] is None and type(None) not in get_args( + field_type + ): field_type = Optional[field_type] new_fields[new_field_name] = (field_type, field_definition[1]) return create_model( # type: ignore @@ -1428,10 +1444,16 @@ def _derive_model( ) @staticmethod - def _derive_field(field: FieldInfo, make_nullable: bool = False) -> Tuple[Type, FieldInfo]: + def _derive_field( + field: FieldInfo, make_nullable: bool = False + ) -> Tuple[Type, FieldInfo]: field_type = field.annotation default = field.default - extra_attrs = {x: getattr(field, x) for x in field._attributes_set if x in field.__slots__ and x not in ['annotation', 'default']} + extra_attrs = { + x: getattr(field, x) + for x in field._attributes_set + if x in field.__slots__ and x not in ["annotation", "default"] + } if make_nullable: # This originally non-nullable field has become nullable field_type = Optional[field_type] @@ -1443,19 +1465,21 @@ def _derive_field(field: FieldInfo, make_nullable: bool = False) -> Tuple[Type, field_new = Field(default=default, **extra_attrs) field_new.metadata = field.metadata return field_type, field_new - + + PT_INFO = Literal["constraints", "derived_from", "dtype", "unique"] + class FieldInfo(fields.FieldInfo): - __slots__ = getattr(fields.FieldInfo, '__slots__') + ( + __slots__ = getattr(fields.FieldInfo, "__slots__") + ( "constraints", "derived_from", "dtype", "unique", ) - + def __init__( - self, + self, constraints: Optional[Union[pl.Expr, Sequence[pl.Expr]]] = None, derived_from: Optional[Union[str, pl.Expr]] = None, dtype: Optional[pl.DataType] = None, @@ -1463,30 +1487,32 @@ def __init__( **kwargs, ): super().__init__(**kwargs) - + self.constraints = constraints self.derived_from = derived_from self.dtype = dtype self.unique = unique - self._attributes_set.update(**{k: getattr(self, k) for k in 
get_args(PT_INFO) if getattr(self, k) is not None}) + self._attributes_set.update( + **{ + k: getattr(self, k) + for k in get_args(PT_INFO) + if getattr(self, k) is not None + } + ) def Field( *args, **kwargs, ): - pt_kwargs = { - k: kwargs.pop(k, None) for k in get_args(PT_INFO) - } + pt_kwargs = {k: kwargs.pop(k, None) for k in get_args(PT_INFO)} meta_kwargs = { k: v for k, v in kwargs.items() if k in fields.FieldInfo.metadata_lookup } base_kwargs = { k: v for k, v in kwargs.items() if k not in {**pt_kwargs, **meta_kwargs} } - finfo = fields.Field( - *args, **base_kwargs - ) + finfo = fields.Field(*args, **base_kwargs) return FieldInfo( **finfo._attributes_set, **meta_kwargs, diff --git a/src/patito/validators.py b/src/patito/validators.py index 357c7ab..09cbd07 100644 --- a/src/patito/validators.py +++ b/src/patito/validators.py @@ -316,4 +316,3 @@ def validate( errors = _find_errors(dataframe=polars_dataframe, schema=schema) if errors: raise DataFrameValidationError(errors=errors, model=schema) - diff --git a/tests/test_dummy_data.py b/tests/test_dummy_data.py index 0a3fe6d..ace5765 100644 --- a/tests/test_dummy_data.py +++ b/tests/test_dummy_data.py @@ -86,11 +86,11 @@ def test_generation_of_unique_data(): class UniqueModel(pt.Model): bool_column: bool - string_column: str = pt.Field(json_schema_extra={"unique":True}) - int_column: int = pt.Field(json_schema_extra={"unique":True}) - float_column: int = pt.Field(json_schema_extra={"unique":True}) - date_column: date = pt.Field(json_schema_extra={"unique":True}) - datetime_column: datetime = pt.Field(json_schema_extra={"unique":True}) + string_column: str = pt.Field(unique=True) + int_column: int = pt.Field(unique=True) + float_column: int = pt.Field(unique=True) + date_column: date = pt.Field(unique=True) + datetime_column: datetime = pt.Field(unique=True) example_df = UniqueModel.examples({"bool_column": [True, False]}) for column in UniqueModel.columns: diff --git a/tests/test_model.py b/tests/test_model.py 
index eefbfae..4192813 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -284,7 +284,7 @@ def test_model_validator(model: Type[pt.Model]) -> None: """Test if all field validators have been included correctly.""" with pytest.raises(ValidationError) as e: model(left=1, opt_left=1, right=1, opt_right=1) - pattern = re.compile(r'Input should be greater than 20') + pattern = re.compile(r"Input should be greater than 20") assert len(pattern.findall(str(e.value))) == 2 # An inner join should keep nullability information @@ -388,7 +388,7 @@ class MyModel(pt.Model): ExpandedModel = MyModel.with_fields( b=(int, ...), - c=(int, None), # TODO should this be nullable if not specified as optional? + c=(int, None), d=(int, pt.Field(gt=10)), e=(Optional[int], None), ) @@ -425,28 +425,32 @@ class InvalidEnumModel(pt.Model): with pytest.raises(TypeError, match=r".*Encountered types: \['int', 'str'\]\."): InvalidEnumModel.sql_types -def test_pt_fields(): +def test_pt_fields(): class Model(pt.Model): a: int b: int = pt.Field(constraints=[(pl.col("b") < 10)]) c: int = pt.Field(derived_from=pl.col("a") + pl.col("b")) d: int = pt.Field(dtype=pl.UInt8) e: int = pt.Field(unique=True) - + schema = Model.model_json_schema() # no serialization issues - props = Model._schema_properties() # extra fields are stored in modified schema_properties - assert 'constraints' in props['b'] - assert 'derived_from' in props['c'] - assert 'dtype' in props['d'] - assert 'unique' in props['e'] - - fields = Model.model_fields # attributes are properly set and catalogued on the `FieldInfo` objects - assert 'constraints' in fields['b']._attributes_set - assert fields['b'].constraints is not None - assert 'derived_from' in fields['c']._attributes_set - assert fields['c'].derived_from is not None - assert 'dtype' in fields['d']._attributes_set - assert fields['d'].dtype is not None - assert 'unique' in fields['e']._attributes_set - assert fields['e'].unique is not None \ No newline at end of file + 
props = ( + Model._schema_properties() + ) # extra fields are stored in modified schema_properties + assert "constraints" in props["b"] + assert "derived_from" in props["c"] + assert "dtype" in props["d"] + assert "unique" in props["e"] + + fields = ( + Model.model_fields + ) # attributes are properly set and catalogued on the `FieldInfo` objects + assert "constraints" in fields["b"]._attributes_set + assert fields["b"].constraints is not None + assert "derived_from" in fields["c"]._attributes_set + assert fields["c"].derived_from is not None + assert "dtype" in fields["d"]._attributes_set + assert fields["d"].dtype is not None + assert "unique" in fields["e"]._attributes_set + assert fields["e"].unique is not None diff --git a/tests/test_polars.py b/tests/test_polars.py index ee39089..c288c0e 100644 --- a/tests/test_polars.py +++ b/tests/test_polars.py @@ -159,8 +159,8 @@ def test_correct_columns_and_dtype_on_read(tmp_path): """A model DataFrame should aid CSV reading with column names and dtypes.""" class Foo(pt.Model): - a: str = pt.Field(json_schema_extra={"derived_from":"column_1"}) - b: int = pt.Field(json_schema_extra={"derived_from":"column_2"}) + a: str = pt.Field(derived_from="column_1") + b: int = pt.Field(derived_from="column_2") csv_path = tmp_path / "foo.csv" csv_path.write_text("1,2") @@ -189,7 +189,7 @@ class Foo(pt.Model): assert unspecified_column_df.dtypes == [pl.Utf8, pl.Int64, pl.Float64] class DerivedModel(pt.Model): - cents: int = pt.Field(json_schema_extra={"derived_from":100 * pl.col("dollars")}) + cents: int = pt.Field(derived_from=100 * pl.col("dollars")) csv_path.write_text("month,dollars\n1,2.99") derived_df = DerivedModel.DataFrame.read_csv(csv_path) diff --git a/tests/test_validators.py b/tests/test_validators.py index e96ecf3..9eb4cb3 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -2,14 +2,13 @@ import enum import sys from datetime import date, datetime -from typing import List, Optional, Union +from typing 
import List, Optional, Union, Literal import polars as pl import pytest -from typing_extensions import Literal import patito as pt -from patito.exceptions import DataFrameValidationError +from patito import DataFrameValidationError from patito.validators import _dewrap_optional, _is_optional, validate @@ -236,7 +235,7 @@ def test_uniqueness_validation(): """It should be able to validate uniqueness.""" class MyModel(pt.Model): - column: int = pt.Field(json_schema_extra={"unique":True}) + column: int = pt.Field(unique=True) non_duplicated_df = pt.DataFrame({"column": [1, 2, 3]}) MyModel.validate(non_duplicated_df) @@ -347,7 +346,7 @@ def test_uniqueness_constraint_validation(): """Uniqueness constraints should be validated.""" class UniqueModel(pt.Model): - product_id: int = pt.Field(json_schema_extra={"unique":True}) + product_id: int = pt.Field(unique=True) validate(dataframe=pl.DataFrame({"product_id": [1, 2]}), schema=UniqueModel) @@ -375,7 +374,7 @@ class BoundModel(pt.Model): multiple_column: float = pt.Field(multiple_of=0.5) # const fields should now use Literal instead, but pyright # complains about Literal of float values - const_column: Literal[3.1415] = pt.Field(default=3.1415) #type: ignore + const_column: Literal[3.1415] = pt.Field(default=3.1415) # type: ignore regex_column: str = pt.Field(pattern=r"value [A-Z]") min_length_column: str = pt.Field(min_length=2) max_length_column: str = pt.Field(max_length=2) @@ -418,7 +417,7 @@ class DTypeModel(pt.Model): smallint_column: int = pt.Field(dtype=pl.Int8) unsigned_int_column: int = pt.Field(dtype=pl.UInt64) unsigned_smallint_column: int = pt.Field(dtype=pl.UInt8) - + assert DTypeModel.dtypes == { "int_column": pl.Int64, "int_explicit_dtype_column": pl.Int64, @@ -600,4 +599,4 @@ class ListModel(pt.Model): ]: # print(old, new) with pytest.raises(DataFrameValidationError): - ListModel.validate(valid_df.with_columns(pl.col(old).alias(new))) \ No newline at end of file + 
ListModel.validate(valid_df.with_columns(pl.col(old).alias(new))) From e2bf0d7351ca8750affcbae8f65ce0e48357dcd5 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Fri, 3 Nov 2023 09:49:19 -0400 Subject: [PATCH 08/29] fix: handle multiple annotations when `dtype` is present in `Field` --- .gitignore | 2 ++ src/patito/pydantic.py | 3 ++- tests/test_model.py | 8 ++++++++ tests/test_polars.py | 2 ++ 4 files changed, 14 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 0f894dc..138ab5c 100644 --- a/.gitignore +++ b/.gitignore @@ -132,3 +132,5 @@ dmypy.json # Pyre type checker .pyre/ + +.vscode/ \ No newline at end of file diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 1434607..6e63e75 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -218,7 +218,8 @@ def _valid_dtypes( # noqa: C901 f"No valid dtype mapping found for column '{column}'." ) return [pl.List(dtype) for dtype in item_dtypes] - if "dtype" in props: + + if "dtype" in props and 'anyOf' not in props: return [ props["dtype"], ] diff --git a/tests/test_model.py b/tests/test_model.py index 4192813..6ae74b6 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -454,3 +454,11 @@ class Model(pt.Model): assert fields["d"].dtype is not None assert "unique" in fields["e"]._attributes_set assert fields["e"].unique is not None + + +def test_nullable_columns(): + class Test(pt.Model): + foo: str | None = pt.Field(dtype=pl.Utf8) + + assert Test.nullable_columns == {"foo"} + assert set(Test.valid_dtypes['foo']) == {pl.Utf8, pl.Null} diff --git a/tests/test_polars.py b/tests/test_polars.py index c288c0e..b156abe 100644 --- a/tests/test_polars.py +++ b/tests/test_polars.py @@ -253,3 +253,5 @@ class Model(pt.Model): # Or a list of columns assert df.drop(["column_1", "column_2"]).columns == [] + + From 017c59b2373705059b951b6a22f8f6dd787c851a Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Fri, 3 Nov 2023 10:07:19 -0400 Subject: [PATCH 09/29] check: annotated 
dtypes match those specified in Field.dtype --- src/patito/pydantic.py | 11 ++++++++--- tests/test_model.py | 19 +++++++++++++++++++ 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 6e63e75..23123a8 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -220,6 +220,8 @@ def _valid_dtypes( # noqa: C901 return [pl.List(dtype) for dtype in item_dtypes] if "dtype" in props and 'anyOf' not in props: + if props['dtype'] not in cls._pydantic_type_to_valid_polars_types(props): # TODO should we allow pl floats for integer columns? Other type hierarchies to consider? + raise ValueError(f"Invalid dtype {props['dtype']} for column '{column}'. Check that specified dtype is allowable for the given type annotations.") return [ props["dtype"], ] @@ -237,7 +239,12 @@ def _valid_dtypes( # noqa: C901 column, {"type": PYTHON_TO_PYDANTIC_TYPES.get(type(props["const"]))} ) return None - elif props["type"] == "integer": + + return cls._pydantic_type_to_valid_polars_types(props) + + @staticmethod + def _pydantic_type_to_valid_polars_types(props: Dict) -> Optional[List[pl.DataType]]: + if props["type"] == "integer": return [ pl.Int64, pl.Int32, @@ -272,8 +279,6 @@ def _valid_dtypes( # noqa: C901 return None # pragma: no cover elif props["type"] == "null": return [pl.Null] - else: # pragma: no cover - return None @property def valid_sql_types( # type: ignore # noqa: C901 diff --git a/tests/test_model.py b/tests/test_model.py index 6ae74b6..c528a66 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -462,3 +462,22 @@ class Test(pt.Model): assert Test.nullable_columns == {"foo"} assert set(Test.valid_dtypes['foo']) == {pl.Utf8, pl.Null} + + +def test_conflicting_type_dtype(): + + class Test(pt.Model): + foo: int = pt.Field(dtype=pl.Utf8) + + with pytest.raises(ValueError): + Test.valid_dtypes() + + class Test(pt.Model): + foo: str = pt.Field(dtype=pl.Float32) + + with pytest.raises(ValueError): + 
Test.valid_dtypes() + + +if __name__ == "__main__": + test_conflicting_type_dtype() From 161300b80c1f898d7246b46bd0a918e6ab818426 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Fri, 3 Nov 2023 10:42:17 -0400 Subject: [PATCH 10/29] chore: misc typing improvements --- src/patito/exceptions.py | 2 +- src/patito/pydantic.py | 44 +++++++++++++++++++++++++++------------- tests/test_model.py | 19 +++++++---------- tests/test_polars.py | 2 -- 4 files changed, 38 insertions(+), 29 deletions(-) diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index 7240c55..1a816e6 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -127,7 +127,7 @@ def __repr_args__(self) -> "ReprArgs": class DataFrameValidationError(Representation, ValueError): __slots__ = "raw_errors", "model", "_error_cache" - def __init__(self, errors: Sequence[ErrorList], model: "BaseModel") -> None: + def __init__(self, errors: Sequence[ErrorList], model: Type["BaseModel"]) -> None: self.raw_errors = errors self.model = model self._error_cache: Optional[List["ErrorDict"]] = None diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 23123a8..86cd75a 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -126,7 +126,7 @@ def columns(cls: Type[ModelType]) -> List[str]: # type: ignore @property def dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore - ) -> dict[str, Type[pl.DataType]]: + ) -> dict[str, PolarsDataType]: """ Return the polars dtypes of the dataframe. @@ -153,7 +153,7 @@ def dtypes( # type: ignore @property def valid_dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore - ) -> dict[str, List[Union[pl.PolarsDataType, pl.List]]]: + ) -> dict[str, List[Union[PolarsDataType, pl.List]]]: """ Return a list of polars dtypes which Patito considers valid for each field. 
@@ -197,10 +197,10 @@ def valid_dtypes( # type: ignore @classmethod def _valid_dtypes( # noqa: C901 - cls: Type[ModelType], + cls: Type[ModelType], # pyright: ignore column: str, props: Dict, - ) -> Optional[List[pl.PolarsDataType]]: + ) -> Optional[List[PolarsDataType]]: """ Map schema property to list of valid polars data types. @@ -218,10 +218,14 @@ def _valid_dtypes( # noqa: C901 f"No valid dtype mapping found for column '{column}'." ) return [pl.List(dtype) for dtype in item_dtypes] - - if "dtype" in props and 'anyOf' not in props: - if props['dtype'] not in cls._pydantic_type_to_valid_polars_types(props): # TODO should we allow pl floats for integer columns? Other type hierarchies to consider? - raise ValueError(f"Invalid dtype {props['dtype']} for column '{column}'. Check that specified dtype is allowable for the given type annotations.") + + if "dtype" in props and "anyOf" not in props: + if props["dtype"] not in cls._pydantic_type_to_valid_polars_types( + props + ): # TODO should we allow pl floats for integer columns? Other type hierarchies to consider? + raise ValueError( + f"Invalid dtype {props['dtype']} for column '{column}'. Check that specified dtype is allowable for the given type annotations." 
+ ) return [ props["dtype"], ] @@ -239,11 +243,13 @@ def _valid_dtypes( # noqa: C901 column, {"type": PYTHON_TO_PYDANTIC_TYPES.get(type(props["const"]))} ) return None - + return cls._pydantic_type_to_valid_polars_types(props) @staticmethod - def _pydantic_type_to_valid_polars_types(props: Dict) -> Optional[List[pl.DataType]]: + def _pydantic_type_to_valid_polars_types( + props: Dict, + ) -> Optional[List[PolarsDataType]]: if props["type"] == "integer": return [ pl.Int64, @@ -574,6 +580,9 @@ class Model(BaseModel, metaclass=ModelMetaclass): defaults: ClassVar[Dict[str, Any]] + if TYPE_CHECKING: + model_fields: ClassVar[dict[str, FieldInfo]] + @classmethod # type: ignore[misc] @property def DataFrame( @@ -786,6 +795,8 @@ def example_value( # noqa: C901 field_type = "null" else: field_type = allowable[0] + else: + raise NotImplementedError if "const" in properties: # The default value is the only valid value, provided as const return properties["const"] @@ -1461,8 +1472,13 @@ def _derive_field( if x in field.__slots__ and x not in ["annotation", "default"] } if make_nullable: - # This originally non-nullable field has become nullable - field_type = Optional[field_type] + if field_type is None: + raise TypeError( + "Cannot make field nullable if no type annotation is provided!" + ) + else: + # This originally non-nullable field has become nullable + field_type = Optional[field_type] elif field.is_required() and default is None: # We need to replace Pydantic's None default value with ... 
in order # to make it clear that the field is still non-nullable and @@ -1507,10 +1523,10 @@ def __init__( ) -def Field( +def Field( # noqa: C901 *args, **kwargs, -): +) -> Any: pt_kwargs = {k: kwargs.pop(k, None) for k in get_args(PT_INFO)} meta_kwargs = { k: v for k, v in kwargs.items() if k in fields.FieldInfo.metadata_lookup diff --git a/tests/test_model.py b/tests/test_model.py index c528a66..e075342 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -461,23 +461,18 @@ class Test(pt.Model): foo: str | None = pt.Field(dtype=pl.Utf8) assert Test.nullable_columns == {"foo"} - assert set(Test.valid_dtypes['foo']) == {pl.Utf8, pl.Null} + assert set(Test.valid_dtypes["foo"]) == {pl.Utf8, pl.Null} def test_conflicting_type_dtype(): - - class Test(pt.Model): + class Test1(pt.Model): foo: int = pt.Field(dtype=pl.Utf8) with pytest.raises(ValueError): - Test.valid_dtypes() - - class Test(pt.Model): + Test1.valid_dtypes + + class Test2(pt.Model): foo: str = pt.Field(dtype=pl.Float32) - - with pytest.raises(ValueError): - Test.valid_dtypes() - -if __name__ == "__main__": - test_conflicting_type_dtype() + with pytest.raises(ValueError): + Test2.valid_dtypes diff --git a/tests/test_polars.py b/tests/test_polars.py index b156abe..c288c0e 100644 --- a/tests/test_polars.py +++ b/tests/test_polars.py @@ -253,5 +253,3 @@ class Model(pt.Model): # Or a list of columns assert df.drop(["column_1", "column_2"]).columns == [] - - From 9e132bc49baebaf3cf874c3469e61c811df88b74 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Fri, 3 Nov 2023 11:27:50 -0400 Subject: [PATCH 11/29] fix: search for nested field constraints on validation, ignore nulls --- src/patito/validators.py | 26 +++++++++++++++++++------- tests/test_validators.py | 21 +++++++++++++++++++++ 2 files changed, 40 insertions(+), 7 deletions(-) diff --git a/src/patito/validators.py b/src/patito/validators.py index 09cbd07..871e043 100644 --- a/src/patito/validators.py +++ b/src/patito/validators.py @@ -244,22 
+244,34 @@ def _find_errors( # noqa: C901 "minLength": lambda v: col.str.len_chars() >= v, "maxLength": lambda v: col.str.len_chars() <= v, } - checks = [ + if "anyOf" in column_properties: + checks = [ + check(x[key]) + for key, check in filters.items() + for x in column_properties["anyOf"] + if key in x + ] + else: + checks = [] + checks += [ check(column_properties[key]) for key, check in filters.items() if key in column_properties ] if checks: - lazy_df = dataframe.lazy() + n_invalid_rows = 0 for check in checks: - lazy_df = lazy_df.filter(check) - valid_rows = lazy_df.collect() - invalid_rows = dataframe.height - valid_rows.height - if invalid_rows > 0: + lazy_df = dataframe.lazy() + lazy_df = lazy_df.filter( + ~check + ) # get failing rows (nulls will evaluate to null on boolean check, we only want failures (false))) + invalid_rows = lazy_df.collect() + n_invalid_rows += invalid_rows.height + if n_invalid_rows > 0: errors.append( ErrorWrapper( RowValueError( - f"{invalid_rows} row{'' if invalid_rows == 1 else 's'} " + f"{n_invalid_rows} row{'' if n_invalid_rows == 1 else 's'} " "with out of bound values." 
), loc=column_name, diff --git a/tests/test_validators.py b/tests/test_validators.py index 9eb4cb3..127cb01 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -3,6 +3,7 @@ import sys from datetime import date, datetime from typing import List, Optional, Union, Literal +import re import polars as pl import pytest @@ -600,3 +601,23 @@ class ListModel(pt.Model): # print(old, new) with pytest.raises(DataFrameValidationError): ListModel.validate(valid_df.with_columns(pl.col(old).alias(new))) + + +def test_nested_field_attrs(): + """ensure that constraints are respected even when embedded inside 'anyOf'""" + + class Test(pt.Model): + foo: int | None = pt.Field( + dtype=pl.Int64, ge=0, le=100, constraints=pt.field.sum() == 100 + ) + + test_df = Test.DataFrame( + {"foo": [110, -10]} + ) # meets constraint, but violates bounds (embedded in 'anyOf' in properties) + with pytest.raises(DataFrameValidationError) as e: + Test.validate(test_df) + pattern = re.compile(r"2 rows with out of bound values") + assert len(pattern.findall(str(e.value))) == 1 + + null_test_df = Test.DataFrame({"foo": [100, None, None]}) + Test.validate(null_test_df) # should not raise From 93e171e3424f39269ccabbc6b31e00219ed77bd0 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Fri, 3 Nov 2023 14:01:04 -0400 Subject: [PATCH 12/29] fix: better inference for column nullability --- src/patito/pydantic.py | 29 ++++++++++++++++++++++++----- tests/test_model.py | 31 +++++++++++++++++++++++++++---- 2 files changed, 51 insertions(+), 9 deletions(-) diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 86cd75a..339af01 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -219,10 +219,27 @@ def _valid_dtypes( # noqa: C901 ) return [pl.List(dtype) for dtype in item_dtypes] - if "dtype" in props and "anyOf" not in props: - if props["dtype"] not in cls._pydantic_type_to_valid_polars_types( - props - ): # TODO should we allow pl floats for integer columns? 
Other type hierarchies to consider? + if "dtype" in props: + + def dtype_invalid(props: Dict) -> bool: + if "type" in props and props[ + "dtype" + ] not in cls._pydantic_type_to_valid_polars_types(props): + return True + elif "anyOf" in props: + for sub_props in props["anyOf"]: + if sub_props["type"] == "null": + continue + else: + if props[ + "dtype" + ] not in cls._pydantic_type_to_valid_polars_types( + sub_props + ): + return True + return False + + if dtype_invalid(props): raise ValueError( f"Invalid dtype {props['dtype']} for column '{column}'. Check that specified dtype is allowable for the given type annotations." ) @@ -502,7 +519,9 @@ def non_nullable_columns( # type: ignore ['another_non_nullable_field', 'non_nullable_field'] """ return set( - k for k in cls.valid_dtypes.keys() if pl.Null not in cls.valid_dtypes[k] + k + for k in cls.columns + if type(None) not in get_args(cls.model_fields[k].annotation) ) @property diff --git a/tests/test_model.py b/tests/test_model.py index e075342..38a18b4 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -457,22 +457,45 @@ class Model(pt.Model): def test_nullable_columns(): - class Test(pt.Model): + class Test1(pt.Model): foo: str | None = pt.Field(dtype=pl.Utf8) - assert Test.nullable_columns == {"foo"} - assert set(Test.valid_dtypes["foo"]) == {pl.Utf8, pl.Null} + assert Test1.nullable_columns == {"foo"} + assert set(Test1.valid_dtypes["foo"]) == {pl.Utf8} + + class Test2(pt.Model): + foo: int | None = pt.Field(dtype=pl.UInt32) + + assert Test2.nullable_columns == {"foo"} + assert set(Test2.valid_dtypes["foo"]) == {pl.UInt32} def test_conflicting_type_dtype(): class Test1(pt.Model): foo: int = pt.Field(dtype=pl.Utf8) - with pytest.raises(ValueError): + with pytest.raises(ValueError) as e: Test1.valid_dtypes + assert "Invalid dtype" in str(e.value) class Test2(pt.Model): foo: str = pt.Field(dtype=pl.Float32) with pytest.raises(ValueError): Test2.valid_dtypes + + class Test3(pt.Model): + foo: str | None = 
pt.Field(dtype=pl.UInt32) + + with pytest.raises(ValueError): + Test3.valid_dtypes + + +def test_polars_python_type_harmonization(): + class Test(pt.Model): + date: datetime = pt.Field(dtype=pl.Datetime(time_unit="us")) + # TODO add more other lesser-used type combinations here + + assert type(Test.valid_dtypes["date"][0]) == pl.Datetime + assert Test.valid_dtypes["date"][0].time_unit == "us" + assert Test.valid_dtypes["date"][0].time_zone == None From e9dbca6b0d6986fc3b76bda074e26d099c291b1e Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Mon, 6 Nov 2023 09:54:00 -0500 Subject: [PATCH 13/29] wip: better type mismatch messaging, move Representation to internal --- src/patito/_pydantic/repr.py | 144 +++++++++++++++++++++++++++++++++++ src/patito/exceptions.py | 74 +----------------- src/patito/pydantic.py | 27 +++---- tests/test_model.py | 17 ++++- 4 files changed, 176 insertions(+), 86 deletions(-) create mode 100644 src/patito/_pydantic/repr.py diff --git a/src/patito/_pydantic/repr.py b/src/patito/_pydantic/repr.py new file mode 100644 index 0000000..35f2777 --- /dev/null +++ b/src/patito/_pydantic/repr.py @@ -0,0 +1,144 @@ +import types +import typing +from typing import ( + Any, + Tuple, + Callable, + Generator, + Union, + Sequence, + Iterable, + Optional, + get_origin, + get_args, + Literal, +) +import sys + +if typing.TYPE_CHECKING: + Loc = Tuple[Union[int, str], ...] + ReprArgs = Sequence[Tuple[Optional[str], Any]] + RichReprResult = Iterable[ + Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]] + ] + +try: + from typing import _TypingBase # type: ignore[attr-defined] +except ImportError: + from typing import _Final as _TypingBase # type: ignore[attr-defined] + +typing_base = _TypingBase + +if sys.version_info < (3, 9): + # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] 
and so on) + TypingGenericAlias = () +else: + from typing import GenericAlias as TypingGenericAlias # type: ignore + +if sys.version_info < (3, 10): + + def origin_is_union(tp: type[Any] | None) -> bool: + return tp is typing.Union + + WithArgsTypes = (TypingGenericAlias,) + +else: + + def origin_is_union(tp: type[Any] | None) -> bool: + return tp is typing.Union or tp is types.UnionType + + WithArgsTypes = typing._GenericAlias, types.GenericAlias, types.UnionType # type: ignore[attr-defined] + + +class Representation: + """ + Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. + + __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations + of objects. + """ + + __slots__: Tuple[str, ...] = tuple() + + def __repr_args__(self) -> "ReprArgs": + """ + Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. + + Can either return: + * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` + * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` + """ + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v is not None] + + def __repr_name__(self) -> str: + """ + Name of the instance's class, used in __repr__. 
+ """ + return self.__class__.__name__ + + def __repr_str__(self, join_str: str) -> str: + return join_str.join( + repr(v) if a is None else f"{a}={v!r}" for a, v in self.__repr_args__() + ) + + def __pretty__( + self, fmt: Callable[[Any], Any], **kwargs: Any + ) -> Generator[Any, None, None]: + """ + Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects + """ + yield self.__repr_name__() + "(" + yield 1 + for name, value in self.__repr_args__(): + if name is not None: + yield name + "=" + yield fmt(value) + yield "," + yield 0 + yield -1 + yield ")" + + def __str__(self) -> str: + return self.__repr_str__(" ") + + def __repr__(self) -> str: + return f'{self.__repr_name__()}({self.__repr_str__(", ")})' + + def __rich_repr__(self) -> "RichReprResult": + """Get fields for Rich library""" + for name, field_repr in self.__repr_args__(): + if name is None: + yield field_repr + else: + yield name, field_repr + + +def display_as_type(obj: Any) -> str: + """Pretty representation of a type, should be as close as possible to the original type definition string. + + Takes some logic from `typing._type_repr`. + """ + if isinstance(obj, types.FunctionType): + return obj.__name__ + elif obj is ...: + return "..." 
+ elif isinstance(obj, Representation): + return repr(obj) + + if not isinstance(obj, (typing_base, WithArgsTypes, type)): + obj = obj.__class__ + + if origin_is_union(get_origin(obj)): + args = ", ".join(map(display_as_type, get_args(obj))) + return f"Union[{args}]" + elif isinstance(obj, WithArgsTypes): + if get_origin(obj) == Literal: + args = ", ".join(map(repr, get_args(obj))) + else: + args = ", ".join(map(display_as_type, get_args(obj))) + return f"{obj.__qualname__}[{args}]" + elif isinstance(obj, type): + return obj.__qualname__ + else: + return repr(obj).replace("typing.", "").replace("typing_extensions.", "") diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index 1a816e6..29ecafa 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -15,6 +15,8 @@ Iterable, ) +from patito._pydantic.repr import Representation + if TYPE_CHECKING: from pydantic import BaseModel @@ -28,78 +30,10 @@ class _ErrorDictRequired(TypedDict): class ErrorDict(_ErrorDictRequired, total=False): ctx: Dict[str, Any] - Loc = Tuple[Union[int, str], ...] - ReprArgs = Sequence[Tuple[Optional[str], Any]] - RichReprResult = Iterable[ - Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]] - ] - - -__all__ = "ErrorWrapper", "ValidationError" + from patito._pydantic.repr import ReprArgs -class Representation: - """ - Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. - - __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations - of objects. - """ - - __slots__: Tuple[str, ...] = tuple() - - def __repr_args__(self) -> "ReprArgs": - """ - Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. 
- - Can either return: - * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` - * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` - """ - attrs = ((s, getattr(self, s)) for s in self.__slots__) - return [(a, v) for a, v in attrs if v is not None] - - def __repr_name__(self) -> str: - """ - Name of the instance's class, used in __repr__. - """ - return self.__class__.__name__ - - def __repr_str__(self, join_str: str) -> str: - return join_str.join( - repr(v) if a is None else f"{a}={v!r}" for a, v in self.__repr_args__() - ) - - def __pretty__( - self, fmt: Callable[[Any], Any], **kwargs: Any - ) -> Generator[Any, None, None]: - """ - Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects - """ - yield self.__repr_name__() + "(" - yield 1 - for name, value in self.__repr_args__(): - if name is not None: - yield name + "=" - yield fmt(value) - yield "," - yield 0 - yield -1 - yield ")" - - def __str__(self) -> str: - return self.__repr_str__(" ") - - def __repr__(self) -> str: - return f'{self.__repr_name__()}({self.__repr_str__(", ")})' - - def __rich_repr__(self) -> "RichReprResult": - """Get fields for Rich library""" - for name, field_repr in self.__repr_args__(): - if name is None: - yield field_repr - else: - yield name, field_repr +__all__ = "ErrorWrapper", "DataFrameValidationError" class ErrorWrapper(Representation): diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 339af01..6dddf4e 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -32,6 +32,7 @@ from patito.polars import DataFrame, LazyFrame from patito.validators import validate +from patito._pydantic.repr import display_as_type try: import pandas as pd @@ -221,27 +222,27 @@ def _valid_dtypes( # noqa: C901 if "dtype" in props: - def dtype_invalid(props: Dict) -> bool: - if "type" in props and props[ - "dtype" - ] not in cls._pydantic_type_to_valid_polars_types(props): 
- return True + def dtype_invalid(props: Dict) -> Tuple[bool, List[PolarsDataType]]: + if "type" in props: + valid_pl_types = cls._pydantic_type_to_valid_polars_types(props) + if props["dtype"] not in valid_pl_types: + return True, valid_pl_types or [] elif "anyOf" in props: for sub_props in props["anyOf"]: if sub_props["type"] == "null": continue else: - if props[ - "dtype" - ] not in cls._pydantic_type_to_valid_polars_types( + valid_pl_types = cls._pydantic_type_to_valid_polars_types( sub_props - ): - return True - return False + ) + if props["dtype"] not in valid_pl_types: + return True, valid_pl_types or [] + return False, [] - if dtype_invalid(props): + invalid, valid_pl_types = dtype_invalid(props) + if invalid: raise ValueError( - f"Invalid dtype {props['dtype']} for column '{column}'. Check that specified dtype is allowable for the given type annotations." + f"Invalid dtype {props['dtype']} for column '{column}'. Allowable polars dtypes for {display_as_type(cls.model_fields[column].annotation)} are: {', '.join([str(x) for x in valid_pl_types])}." ) return [ props["dtype"], diff --git a/tests/test_model.py b/tests/test_model.py index 38a18b4..b011102 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -476,19 +476,30 @@ class Test1(pt.Model): with pytest.raises(ValueError) as e: Test1.valid_dtypes - assert "Invalid dtype" in str(e.value) + assert ( + f"Invalid dtype Utf8 for column 'foo'. Allowable polars dtypes for int are: {', '.join(str(x) for x in PL_INTEGER_DTYPES)}." + == str(e.value) + ) class Test2(pt.Model): foo: str = pt.Field(dtype=pl.Float32) - with pytest.raises(ValueError): + with pytest.raises(ValueError) as e: Test2.valid_dtypes + assert ( + "Invalid dtype Float32 for column 'foo'. Allowable polars dtypes for str are: Utf8." 
+ == str(e.value) + ) class Test3(pt.Model): foo: str | None = pt.Field(dtype=pl.UInt32) - with pytest.raises(ValueError): + with pytest.raises(ValueError) as e: Test3.valid_dtypes + assert ( + "Invalid dtype UInt32 for column 'foo'. Allowable polars dtypes for Union[str, NoneType] are: Utf8." + == str(e.value) + ) def test_polars_python_type_harmonization(): From 399673e1be00cd231d010218f8f27740cb8dffe2 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Mon, 6 Nov 2023 10:20:03 -0500 Subject: [PATCH 14/29] wip: onto classproperty formulation in lieu of metaclass not supported python<3.9, will break in python>=3.13 at that point, we can roll our own following polars: https://github.com/pola-rs/polars/blob/8d29d3cebec713363db4ad5d782c74047e24314d/py-polars/polars/datatypes/classes.py#L25C12-L25C12 --- pyproject.toml | 2 +- src/patito/_pydantic/__init__.py | 0 src/patito/pydantic.py | 95 ++++++++++++-------------------- tests/test_model.py | 1 + 4 files changed, 36 insertions(+), 62 deletions(-) create mode 100644 src/patito/_pydantic/__init__.py diff --git a/pyproject.toml b/pyproject.toml index c0b4261..28c5522 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ documentation = "https://patito.readthedocs.io" keywords = ["validation", "dataframe"] [tool.poetry.dependencies] -python = "^3.8" +python = ">=3.9,<3.13" pydantic = ">=2.0.0" polars = ">=0.19.0" # Required for typing.Literal in python3.7 diff --git a/src/patito/_pydantic/__init__.py b/src/patito/_pydantic/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 6dddf4e..394472e 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -73,6 +73,17 @@ type(None): "null", } +PL_INTEGER_DTYPES = [ + pl.Int64, + pl.Int32, + pl.Int16, + pl.Int8, + pl.UInt64, + pl.UInt32, + pl.UInt16, + pl.UInt8, +] + class ModelMetaclass(PydanticModelMetaclass): """ @@ -81,30 +92,16 @@ class ModelMetaclass(PydanticModelMetaclass): 
Responsible for setting any relevant model-dependent class properties. """ - def __init__(cls, name: str, bases: tuple, clsdict: dict, **kwargs) -> None: - """ - Construct new patito model. + ... - Args: - name: Name of model class. - bases: Tuple of superclasses. - clsdict: Dictionary containing class properties. - """ - super().__init__(name, bases, clsdict) - # Add a custom subclass of patito.DataFrame to the model class, - # where .set_model() has been implicitly set. - cls.DataFrame = DataFrame._construct_dataframe_model_class( - model=cls, # type: ignore - ) - # Similarly for LazyFrame - cls.LazyFrame = LazyFrame._construct_lazyframe_model_class( - model=cls, # type: ignore - ) - # --- Class properties --- - # These properties will only be available on Model *classes*, not instantiated - # objects This is backwards compatible to python versions before python 3.9, - # unlike a combination of @classmethod and @property. +class Model(BaseModel, metaclass=ModelMetaclass): + """Custom pydantic class for representing table schema and constructing rows.""" + + if TYPE_CHECKING: + model_fields: ClassVar[Dict[str, FieldInfo]] + + @classmethod @property def columns(cls: Type[ModelType]) -> List[str]: # type: ignore """ @@ -124,6 +121,7 @@ def columns(cls: Type[ModelType]) -> List[str]: # type: ignore """ return list(cls.model_json_schema()["properties"].keys()) + @classmethod @property def dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore @@ -151,6 +149,7 @@ def dtypes( # type: ignore column: valid_dtypes[0] for column, valid_dtypes in cls.valid_dtypes.items() } + @classmethod @property def valid_dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore @@ -269,16 +268,7 @@ def _pydantic_type_to_valid_polars_types( props: Dict, ) -> Optional[List[PolarsDataType]]: if props["type"] == "integer": - return [ - pl.Int64, - pl.Int32, - pl.Int16, - pl.Int8, - pl.UInt64, - pl.UInt32, - pl.UInt16, - pl.UInt8, - ] + return PL_INTEGER_DTYPES elif props["type"] 
== "number": if props.get("format") == "time-delta": return [pl.Duration] @@ -304,6 +294,7 @@ def _pydantic_type_to_valid_polars_types( elif props["type"] == "null": return [pl.Null] + @classmethod @property def valid_sql_types( # type: ignore # noqa: C901 cls: Type[ModelType], # pyright: ignore @@ -439,6 +430,7 @@ def valid_sql_types( # type: ignore # noqa: C901 return valid_dtypes + @classmethod @property def sql_types( # type: ignore cls: Type[ModelType], # pyright: ignore @@ -470,6 +462,7 @@ def sql_types( # type: ignore for column, valid_types in cls.valid_sql_types.items() } + @classmethod @property def defaults( # type: ignore cls: Type[ModelType], # pyright: ignore @@ -497,6 +490,7 @@ def defaults( # type: ignore if "default" in props } + @classmethod @property def non_nullable_columns( # type: ignore cls: Type[ModelType], # pyright: ignore @@ -525,6 +519,7 @@ def non_nullable_columns( # type: ignore if type(None) not in get_args(cls.model_fields[k].annotation) ) + @classmethod @property def nullable_columns( # type: ignore cls: Type[ModelType], # pyright: ignore @@ -549,6 +544,7 @@ def nullable_columns( # type: ignore """ return set(cls.columns) - cls.non_nullable_columns + @classmethod @property def unique_columns( # type: ignore cls: Type[ModelType], # pyright: ignore @@ -574,41 +570,15 @@ def unique_columns( # type: ignore props = cls._schema_properties() return {column for column in cls.columns if props[column].get("unique", False)} - -class Model(BaseModel, metaclass=ModelMetaclass): - """Custom pydantic class for representing table schema and constructing rows.""" - - # -- Class properties set by model metaclass -- - # This weird combination of a MetaClass + type annotation - # in order to make the following work simultaneously: - # 1. Make these dynamically constructed properties of the class. - # 2. Have the correct type information for type checkers. - # 3. Allow sphinx-autodoc to construct correct documentation. - # 4. 
Be compatible with python 3.7. - # Once we drop support for python 3.7, we can replace all of this with just a simple - # combination of @property and @classmethod. - columns: ClassVar[List[str]] - - unique_columns: ClassVar[Set[str]] - non_nullable_columns: ClassVar[Set[str]] - nullable_columns: ClassVar[Set[str]] - - dtypes: ClassVar[Dict[str, Type[pl.DataType]]] - sql_types: ClassVar[Dict[str, str]] - valid_dtypes: ClassVar[Dict[str, List[Type[pl.DataType]]]] - valid_sql_types: ClassVar[Dict[str, List["DuckDBSQLType"]]] - - defaults: ClassVar[Dict[str, Any]] - - if TYPE_CHECKING: - model_fields: ClassVar[dict[str, FieldInfo]] - @classmethod # type: ignore[misc] @property def DataFrame( cls: Type[ModelType], ) -> Type[DataFrame[ModelType]]: # pyright: ignore # noqa """Return DataFrame class where DataFrame.set_model() is set to self.""" + return DataFrame._construct_dataframe_model_class( + model=cls, # type: ignore + ) @classmethod # type: ignore[misc] @property @@ -616,6 +586,9 @@ def LazyFrame( cls: Type[ModelType], ) -> Type[LazyFrame[ModelType]]: # pyright: ignore """Return DataFrame class where DataFrame.set_model() is set to self.""" + return LazyFrame._construct_lazyframe_model_class( + model=cls, # type: ignore + ) @classmethod def from_row( diff --git a/tests/test_model.py b/tests/test_model.py index b011102..463892b 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -10,6 +10,7 @@ from pydantic import ValidationError import patito as pt +from patito.pydantic import PL_INTEGER_DTYPES def test_model_example(): From a63db3f5d4038bea22eae0c5497d94236f6332ea Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Mon, 6 Nov 2023 11:37:02 -0500 Subject: [PATCH 15/29] wip: custom classproperty (from polars) --- pyproject.toml | 2 +- src/patito/pydantic.py | 62 +++++++++++++++++++++++++----------------- 2 files changed, 38 insertions(+), 26 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 28c5522..c0b4261 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -11,7 +11,7 @@ documentation = "https://patito.readthedocs.io" keywords = ["validation", "dataframe"] [tool.poetry.dependencies] -python = ">=3.9,<3.13" +python = "^3.8" pydantic = ">=2.0.0" polars = ">=0.19.0" # Required for typing.Literal in python3.7 diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 394472e..bd6fb3e 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -20,6 +20,7 @@ get_args, Sequence, Tuple, + Callable ) import polars as pl @@ -85,6 +86,24 @@ ] +class classproperty: + """Equivalent to @property, but works on a class (doesn't require an instance). + + https://github.com/pola-rs/polars/blob/8d29d3cebec713363db4ad5d782c74047e24314d/py-polars/polars/datatypes/classes.py#L25C12-L25C12 + """ + + def __init__(self, method: Callable[..., Any] | None = None) -> None: + self.fget = method + + def __get__(self, instance: Any, cls: type | None = None) -> Any: + return self.fget(cls) # type: ignore[misc] + + def getter(self, method: Callable[..., Any]) -> Any: # noqa: D102 + self.fget = method + return self + + + class ModelMetaclass(PydanticModelMetaclass): """ Metclass used by patito.Model. @@ -100,9 +119,12 @@ class Model(BaseModel, metaclass=ModelMetaclass): if TYPE_CHECKING: model_fields: ClassVar[Dict[str, FieldInfo]] + + model_config = ConfigDict( + ignored_types=(classproperty,), + ) - @classmethod - @property + @classproperty def columns(cls: Type[ModelType]) -> List[str]: # type: ignore """ Return the name of the dataframe columns specified by the fields of the model. 
@@ -121,8 +143,7 @@ def columns(cls: Type[ModelType]) -> List[str]: # type: ignore """ return list(cls.model_json_schema()["properties"].keys()) - @classmethod - @property + @classproperty def dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> dict[str, PolarsDataType]: @@ -149,8 +170,7 @@ def dtypes( # type: ignore column: valid_dtypes[0] for column, valid_dtypes in cls.valid_dtypes.items() } - @classmethod - @property + @classproperty def valid_dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> dict[str, List[Union[PolarsDataType, pl.List]]]: @@ -294,8 +314,7 @@ def _pydantic_type_to_valid_polars_types( elif props["type"] == "null": return [pl.Null] - @classmethod - @property + @classproperty def valid_sql_types( # type: ignore # noqa: C901 cls: Type[ModelType], # pyright: ignore ) -> dict[str, List["DuckDBSQLType"]]: @@ -430,8 +449,7 @@ def valid_sql_types( # type: ignore # noqa: C901 return valid_dtypes - @classmethod - @property + @classproperty def sql_types( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> dict[str, str]: @@ -462,8 +480,7 @@ def sql_types( # type: ignore for column, valid_types in cls.valid_sql_types.items() } - @classmethod - @property + @classproperty def defaults( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> dict[str, Any]: @@ -490,8 +507,7 @@ def defaults( # type: ignore if "default" in props } - @classmethod - @property + @classproperty def non_nullable_columns( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> set[str]: @@ -519,8 +535,7 @@ def non_nullable_columns( # type: ignore if type(None) not in get_args(cls.model_fields[k].annotation) ) - @classmethod - @property + @classproperty def nullable_columns( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> set[str]: @@ -544,8 +559,7 @@ def nullable_columns( # type: ignore """ return set(cls.columns) - cls.non_nullable_columns - @classmethod - @property + @classproperty def unique_columns( # type: ignore 
cls: Type[ModelType], # pyright: ignore ) -> set[str]: @@ -570,20 +584,18 @@ def unique_columns( # type: ignore props = cls._schema_properties() return {column for column in cls.columns if props[column].get("unique", False)} - @classmethod # type: ignore[misc] - @property + @classproperty def DataFrame( - cls: Type[ModelType], + cls: Type[ModelType], # type: ignore[misc] ) -> Type[DataFrame[ModelType]]: # pyright: ignore # noqa """Return DataFrame class where DataFrame.set_model() is set to self.""" return DataFrame._construct_dataframe_model_class( model=cls, # type: ignore ) - @classmethod # type: ignore[misc] - @property + @classproperty def LazyFrame( - cls: Type[ModelType], + cls: Type[ModelType], # type: ignore[misc] ) -> Type[LazyFrame[ModelType]]: # pyright: ignore """Return DataFrame class where DataFrame.set_model() is set to self.""" return LazyFrame._construct_lazyframe_model_class( @@ -592,7 +604,7 @@ def LazyFrame( @classmethod def from_row( - cls: Type[ModelType], + cls: Type[ModelType], # type: ignore[misc] row: Union["pd.DataFrame", pl.DataFrame], validate: bool = True, ) -> ModelType: From db44aa344edfa51fec9a9bc114a187b5e780f081 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Mon, 6 Nov 2023 13:07:50 -0500 Subject: [PATCH 16/29] wip: robustify array dtype inference, add pt custom fields to `Field()` --- src/patito/pydantic.py | 71 ++++++++++++++++++++-------------------- tests/test_dummy_data.py | 8 ++--- 2 files changed, 39 insertions(+), 40 deletions(-) diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index bd6fb3e..7ebcaa7 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -20,7 +20,7 @@ get_args, Sequence, Tuple, - Callable + Callable, ) import polars as pl @@ -88,7 +88,7 @@ class classproperty: """Equivalent to @property, but works on a class (doesn't require an instance). 
- + https://github.com/pola-rs/polars/blob/8d29d3cebec713363db4ad5d782c74047e24314d/py-polars/polars/datatypes/classes.py#L25C12-L25C12 """ @@ -103,7 +103,6 @@ def getter(self, method: Callable[..., Any]) -> Any: # noqa: D102 return self - class ModelMetaclass(PydanticModelMetaclass): """ Metclass used by patito.Model. @@ -119,7 +118,7 @@ class Model(BaseModel, metaclass=ModelMetaclass): if TYPE_CHECKING: model_fields: ClassVar[Dict[str, FieldInfo]] - + model_config = ConfigDict( ignored_types=(classproperty,), ) @@ -230,20 +229,13 @@ def _valid_dtypes( # noqa: C901 Returns: List of valid dtypes. None if no mapping exists. """ - if props.get("type") == "array": - array_props = props["items"] - item_dtypes = cls._valid_dtypes(column, array_props) - if item_dtypes is None: - raise NotImplementedError( - f"No valid dtype mapping found for column '{column}'." - ) - return [pl.List(dtype) for dtype in item_dtypes] - if "dtype" in props: def dtype_invalid(props: Dict) -> Tuple[bool, List[PolarsDataType]]: if "type" in props: - valid_pl_types = cls._pydantic_type_to_valid_polars_types(props) + valid_pl_types = cls._pydantic_type_to_valid_polars_types( + column, props + ) if props["dtype"] not in valid_pl_types: return True, valid_pl_types or [] elif "anyOf" in props: @@ -252,7 +244,7 @@ def dtype_invalid(props: Dict) -> Tuple[bool, List[PolarsDataType]]: continue else: valid_pl_types = cls._pydantic_type_to_valid_polars_types( - sub_props + column, sub_props ) if props["dtype"] not in valid_pl_types: return True, valid_pl_types or [] @@ -281,13 +273,25 @@ def dtype_invalid(props: Dict) -> Tuple[bool, List[PolarsDataType]]: ) return None - return cls._pydantic_type_to_valid_polars_types(props) + return cls._pydantic_type_to_valid_polars_types(column, props) - @staticmethod + @classmethod def _pydantic_type_to_valid_polars_types( + cls, + column: str, props: Dict, ) -> Optional[List[PolarsDataType]]: - if props["type"] == "integer": + if props["type"] == "array": + 
array_props = props["items"] + item_dtypes = ( + cls._valid_dtypes(column, array_props) if array_props else None + ) + if item_dtypes is None: + raise NotImplementedError( + f"No valid dtype mapping found for column '{column}'." + ) + return [pl.List(dtype) for dtype in item_dtypes] + elif props["type"] == "integer": return PL_INTEGER_DTYPES elif props["type"] == "number": if props.get("format") == "time-delta": @@ -593,7 +597,7 @@ def DataFrame( model=cls, # type: ignore ) - @classproperty + @classproperty def LazyFrame( cls: Type[ModelType], # type: ignore[misc] ) -> Type[LazyFrame[ModelType]]: # pyright: ignore @@ -1509,7 +1513,7 @@ def __init__( self, constraints: Optional[Union[pl.Expr, Sequence[pl.Expr]]] = None, derived_from: Optional[Union[str, pl.Expr]] = None, - dtype: Optional[pl.DataType] = None, + dtype: Optional[PolarsDataType] = None, unique: bool = False, **kwargs, ): @@ -1530,20 +1534,24 @@ def __init__( def Field( # noqa: C901 *args, + constraints: Optional[Union[pl.Expr, Sequence[pl.Expr]]] = None, + derived_from: Optional[Union[str, pl.Expr]] = None, + dtype: Optional[PolarsDataType] = None, + unique: bool = False, **kwargs, ) -> Any: - pt_kwargs = {k: kwargs.pop(k, None) for k in get_args(PT_INFO)} meta_kwargs = { k: v for k, v in kwargs.items() if k in fields.FieldInfo.metadata_lookup } - base_kwargs = { - k: v for k, v in kwargs.items() if k not in {**pt_kwargs, **meta_kwargs} - } + base_kwargs = {k: v for k, v in kwargs.items() if k not in meta_kwargs} finfo = fields.Field(*args, **base_kwargs) return FieldInfo( **finfo._attributes_set, **meta_kwargs, - **pt_kwargs, + constraints=constraints, + derived_from=derived_from, + dtype=dtype, + unique=unique, ) @@ -1562,19 +1570,10 @@ class FieldDoc: All rows must satisfy the given constraint. You can refer to the given column with ``pt.field``, which will automatically be replaced with ``polars.col()`` before evaluation. - unique (bool): All row values must be unique. 
+ derived_from (Union[str, polars.Expr]): used to mark fields that are meant to be derived from other fields. Users can specify a polars expression that will be called to derive the column value when `pt.DataFrame.derive` is called. dtype (polars.datatype.DataType): The given dataframe column must have the given polars dtype, for instance ``polars.UInt64`` or ``pl.Float32``. - gt: All values must be greater than ``gt``. - ge: All values must be greater than or equal to ``ge``. - lt: All values must be less than ``lt``. - le: All values must be less than or equal to ``lt``. - multiple_of: All values must be multiples of the given value. - const (bool): If set to ``True`` `all` values must be equal to the provided - default value, the first argument provided to the ``Field`` constructor. - regex (str): UTF-8 string column must match regex pattern for all row values. - min_length (int): Minimum length of all string values in a UTF-8 column. - max_length (int): Maximum length of all string values in a UTF-8 column. + unique (bool): All row values must be unique. 
Return: FieldInfo: Object used to represent additional constraints put upon the given diff --git a/tests/test_dummy_data.py b/tests/test_dummy_data.py index ace5765..e3e6db2 100644 --- a/tests/test_dummy_data.py +++ b/tests/test_dummy_data.py @@ -1,10 +1,9 @@ """Test of functionality related to the generation of dummy data.""" from datetime import date, datetime -from typing import Optional +from typing import Optional, Literal, List import polars as pl import pytest -from typing_extensions import Literal import patito as pt @@ -52,11 +51,12 @@ class MyModel(pt.Model): a: int b: Optional[str] c: Optional[int] + d: Optional[List[str]] = pt.Field(dtype=pl.List(pl.Utf8)) df = MyModel.examples({"a": [1, 2]}) assert isinstance(df, pl.DataFrame) - assert df.dtypes == [pl.Int64, pl.Utf8, pl.Int64] - assert df.columns == ["a", "b", "c"] + assert df.dtypes == [pl.Int64, pl.Utf8, pl.Int64, pl.List] + assert df.columns == ["a", "b", "c", "d"] # A TypeError should be raised when you provide no column names with pytest.raises( From 438974c807c3917da93f2b7264b5f9e4172af7f0 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Wed, 8 Nov 2023 14:44:36 -0500 Subject: [PATCH 17/29] feature: custom Field/FieldInfo constructors for better extensibility --- src/patito/pydantic.py | 154 +++++++++++++++++++++++++++-------------- tests/test_model.py | 32 ++++++++- 2 files changed, 132 insertions(+), 54 deletions(-) diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 7ebcaa7..6dbdf2f 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -22,10 +22,12 @@ Tuple, Callable, ) +from functools import reduce +import inspect import polars as pl from polars.datatypes import PolarsDataType -from pydantic import fields +from pydantic import fields, PydanticUndefinedAnnotation from pydantic import ConfigDict, BaseModel, create_model # noqa: F401 from pydantic._internal._model_construction import ( ModelMetaclass as PydanticModelMetaclass, @@ -50,6 +52,23 @@ # Should be a 
typed subclass of Model. ModelType = TypeVar("ModelType", bound="Model") +PT_INFO = Literal["constraints", "derived_from", "dtype", "unique"] + +_Unset: Any = PydanticUndefinedAnnotation + + +class PatitoFieldInfo(fields.FieldInfo): + """ + !!! warning + Not intended to be used directly. Specified to facilitate proper type hinting, while preserving customizability of the generated `FieldInfo` objects. + """ + + constraints: pl.Expr | Sequence[pl.Expr] | None = _Unset + derived_from: str | pl.Expr | None = _Unset + dtype: PolarsDataType | None = _Unset + unique: bool | None = _Unset + + # A mapping from pydantic types to the equivalent type used in DuckDB PYDANTIC_TO_DUCKDB_TYPES = { "integer": "BIGINT", @@ -117,7 +136,7 @@ class Model(BaseModel, metaclass=ModelMetaclass): """Custom pydantic class for representing table schema and constructing rows.""" if TYPE_CHECKING: - model_fields: ClassVar[Dict[str, FieldInfo]] + model_fields: ClassVar[Dict[str, PatitoFieldInfo]] model_config = ConfigDict( ignored_types=(classproperty,), @@ -1471,8 +1490,9 @@ def _derive_model( @staticmethod def _derive_field( - field: FieldInfo, make_nullable: bool = False - ) -> Tuple[Type, FieldInfo]: + field: PatitoFieldInfo, + make_nullable: bool = False, + ) -> Tuple[Type, PatitoFieldInfo]: field_type = field.annotation default = field.default extra_attrs = { @@ -1498,61 +1518,93 @@ def _derive_field( return field_type, field_new -PT_INFO = Literal["constraints", "derived_from", "dtype", "unique"] +class PatitoFieldExtension(BaseModel): + """Stores type hints and default values for patito-custom field attributes""" + + constraints: pl.Expr | Sequence[pl.Expr] | None = _Unset + derived_from: str | pl.Expr | None = _Unset + dtype: PolarsDataType | None = _Unset + unique: bool | None = _Unset + + model_config = ConfigDict(arbitrary_types_allowed=True) -class FieldInfo(fields.FieldInfo): - __slots__ = getattr(fields.FieldInfo, "__slots__") + ( - "constraints", - "derived_from", - "dtype", - 
"unique", +def field_info(exts: Sequence[Type[BaseModel]]) -> Type[fields.FieldInfo]: + """generates a custom FieldInfo class, with extra attributes specified by the models passed as exts + + Parameters + ---------- + exts : Sequence[Type[BaseModel]] + pydantic models describing the extra attributes to be added to the FieldInfo class + + Returns + ------- + Type[fields.FieldInfo] + a child class of pydantic's `FieldInfo`, with slots and attributes updated to include the extra attributes + """ + ext_fields = reduce( + lambda x, y: x + y, [tuple(x.model_fields.keys()) for x in exts] ) - def __init__( - self, - constraints: Optional[Union[pl.Expr, Sequence[pl.Expr]]] = None, - derived_from: Optional[Union[str, pl.Expr]] = None, - dtype: Optional[PolarsDataType] = None, - unique: bool = False, + class FieldInfo(fields.FieldInfo): + __slots__ = getattr(fields.FieldInfo, "__slots__") + ext_fields + + def __init__(self, *args, **kwargs): + super().__init__( + *args, **kwargs + ) # processes base fields, popping associated kwargs + self._attributes_set.update( + **{k: v for k, v in kwargs.items() if v is not _Unset} + ) + for f in ext_fields: + self.__setattr__(f, kwargs.pop(f, None)) + + return FieldInfo + + +def field(exts: Sequence[Type[BaseModel]]) -> Callable: + """a `Field` callable factory that accepts extra attributes specified by the models passed as exts + + Parameters + ---------- + exts : Sequence[Type[BaseModel]] + pydantic models describing the extra attributes to be added to the FieldInfo class + + Returns + ------- + Callable + a custom factory method that splits arguments intended for pydantic's `Field` constructor and the extra attributes + """ + FieldInfo = field_info(exts=exts) + + def _Field( # noqa: C901 + *args, **kwargs, - ): - super().__init__(**kwargs) - - self.constraints = constraints - self.derived_from = derived_from - self.dtype = dtype - self.unique = unique - self._attributes_set.update( - **{ - k: getattr(self, k) - for k in 
get_args(PT_INFO) - if getattr(self, k) is not None - } + ) -> Any: + meta_kwargs = { + k: v for k, v in kwargs.items() if k in fields.FieldInfo.metadata_lookup + } + base_kwargs = { + k: v + for k, v in kwargs.items() + if k not in meta_kwargs and k in inspect.signature(fields.Field).parameters + } + finfo = fields.Field(*args, **base_kwargs) + new_kwargs = { + k: v + for k, v in kwargs.items() + if k not in meta_kwargs and k not in base_kwargs + } + return FieldInfo( + **finfo._attributes_set, + **meta_kwargs, + **new_kwargs, ) + return _Field -def Field( # noqa: C901 - *args, - constraints: Optional[Union[pl.Expr, Sequence[pl.Expr]]] = None, - derived_from: Optional[Union[str, pl.Expr]] = None, - dtype: Optional[PolarsDataType] = None, - unique: bool = False, - **kwargs, -) -> Any: - meta_kwargs = { - k: v for k, v in kwargs.items() if k in fields.FieldInfo.metadata_lookup - } - base_kwargs = {k: v for k, v in kwargs.items() if k not in meta_kwargs} - finfo = fields.Field(*args, **base_kwargs) - return FieldInfo( - **finfo._attributes_set, - **meta_kwargs, - constraints=constraints, - derived_from=derived_from, - dtype=dtype, - unique=unique, - ) + +Field = field(exts=[PatitoFieldExtension]) class FieldDoc: diff --git a/tests/test_model.py b/tests/test_model.py index 463892b..6db4885 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -3,14 +3,14 @@ import enum import re from datetime import date, datetime, timedelta -from typing import List, Optional, Type, Literal +from typing import List, Optional, Type, Literal, get_args import polars as pl import pytest -from pydantic import ValidationError +from pydantic import ValidationError, BaseModel, fields import patito as pt -from patito.pydantic import PL_INTEGER_DTYPES +from patito.pydantic import PL_INTEGER_DTYPES, PT_INFO, field, _Unset def test_model_example(): @@ -444,17 +444,43 @@ class Model(pt.Model): assert "dtype" in props["d"] assert "unique" in props["e"] + def check_repr(field, set_value: 
str) -> None: + assert f"{set_value}=" in repr(field) + assert all(x not in repr(field) for x in get_args(PT_INFO) if x != set_value) + fields = ( Model.model_fields ) # attributes are properly set and catalogued on the `FieldInfo` objects assert "constraints" in fields["b"]._attributes_set assert fields["b"].constraints is not None + check_repr(fields["b"], "constraints") assert "derived_from" in fields["c"]._attributes_set assert fields["c"].derived_from is not None + check_repr(fields["c"], "derived_from") assert "dtype" in fields["d"]._attributes_set assert fields["d"].dtype is not None + check_repr(fields["d"], "dtype") assert "unique" in fields["e"]._attributes_set assert fields["e"].unique is not None + check_repr(fields["e"], "unique") + + +def test_custom_field_info(): + class FieldExt(BaseModel): + foo: str | None = _Unset + + Field = field(exts=[FieldExt]) + + class Model(pt.Model): + bar: int = Field(foo="hello") + + test_field = Model.model_fields["bar"] + assert ( + test_field.foo == "hello" + ) # TODO passes but typing is unhappy here, can we make custom FieldInfo configurable? 
If users subclass `Model` then it is easy to reset the typing to point at their own `FieldInfo` implementation + assert "foo=" in repr(test_field) + with pytest.raises(AttributeError): + print(test_field.derived_from) # patito FieldInfo successfully overriden def test_nullable_columns(): From 386ba8385ec07c3db464010068aa1cb2933fcec9 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Wed, 8 Nov 2023 15:58:14 -0500 Subject: [PATCH 18/29] fix: make _PatitoFieldInfo private, ensure ext fields -> _schema_props --- src/patito/pydantic.py | 17 +++++++++-------- tests/test_model.py | 1 + 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 6dbdf2f..b295dd5 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -57,7 +57,7 @@ _Unset: Any = PydanticUndefinedAnnotation -class PatitoFieldInfo(fields.FieldInfo): +class _PatitoFieldInfo(fields.FieldInfo): """ !!! warning Not intended to be used directly. Specified to facilitate proper type hinting, while preserving customizability of the generated `FieldInfo` objects. 
@@ -136,7 +136,7 @@ class Model(BaseModel, metaclass=ModelMetaclass): """Custom pydantic class for representing table schema and constructing rows.""" if TYPE_CHECKING: - model_fields: ClassVar[Dict[str, PatitoFieldInfo]] + model_fields: ClassVar[Dict[str, _PatitoFieldInfo]] model_config = ConfigDict( ignored_types=(classproperty,), @@ -1439,10 +1439,11 @@ def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: fields[field_name]["type"] = PYTHON_TO_PYDANTIC_TYPES[ type(field_info["const"]) ] - for f in get_args(PT_INFO): - v = getattr(cls.model_fields[field_name], f, None) - if v is not None: - fields[field_name][f] = v + for f in cls.model_fields[field_name]._attributes_set: + if f not in fields[field_name]: + v = getattr(cls.model_fields[field_name], f, None) + if v is not None: + fields[field_name][f] = v return fields @@ -1490,9 +1491,9 @@ def _derive_model( @staticmethod def _derive_field( - field: PatitoFieldInfo, + field: _PatitoFieldInfo, make_nullable: bool = False, - ) -> Tuple[Type, PatitoFieldInfo]: + ) -> Tuple[Type, _PatitoFieldInfo]: field_type = field.annotation default = field.default extra_attrs = { diff --git a/tests/test_model.py b/tests/test_model.py index 6db4885..5a45e6b 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -479,6 +479,7 @@ class Model(pt.Model): test_field.foo == "hello" ) # TODO passes but typing is unhappy here, can we make custom FieldInfo configurable? 
If users subclass `Model` then it is easy to reset the typing to point at their own `FieldInfo` implementation assert "foo=" in repr(test_field) + assert "foo" in Model._schema_properties()["bar"] with pytest.raises(AttributeError): print(test_field.derived_from) # patito FieldInfo successfully overriden From ee63672a471f20ab818153a204b3d51ea2ad1549 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Wed, 13 Dec 2023 16:26:52 -0500 Subject: [PATCH 19/29] wip: serialization wip: ColumnInfo API cleanup --- src/patito/exceptions.py | 3 - src/patito/polars.py | 6 +- src/patito/pydantic.py | 238 ++++++++++++++++++++------------------- src/patito/validators.py | 7 +- tests/test_model.py | 119 ++++++++++---------- tests/test_polars.py | 11 +- 6 files changed, 193 insertions(+), 191 deletions(-) diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index 29ecafa..33b3ca6 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -1,4 +1,3 @@ -import json from typing import ( TYPE_CHECKING, Any, @@ -10,9 +9,7 @@ Tuple, Type, Union, - Callable, TypedDict, - Iterable, ) from patito._pydantic.repr import Representation diff --git a/src/patito/polars.py b/src/patito/polars.py index 2bf210a..34ad561 100644 --- a/src/patito/polars.py +++ b/src/patito/polars.py @@ -406,9 +406,9 @@ def derive(self: DF) -> DF: └─────┴─────┴────────────┘ """ df = self.lazy() - for column_name, props in self.model._schema_properties().items(): - if "derived_from" in props: - derived_from = props["derived_from"] + for column_name, info in self.model.column_infos.items(): + if info.derived_from is not None: + derived_from = info.derived_from dtype = self.model.dtypes[column_name] if isinstance(derived_from, str): df = df.with_columns( diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index b295dd5..1f3eb19 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -11,7 +11,6 @@ Dict, List, Optional, - Set, Type, TypeVar, Union, @@ -22,12 +21,15 @@ Tuple, 
Callable, ) -from functools import reduce -import inspect +import json import polars as pl -from polars.datatypes import PolarsDataType -from pydantic import fields, PydanticUndefinedAnnotation +from polars.polars import dtype_str_repr +from polars.datatypes import PolarsDataType, convert, DataTypeClass, DataType +from pydantic import ( + fields, + field_serializer, +) from pydantic import ConfigDict, BaseModel, create_model # noqa: F401 from pydantic._internal._model_construction import ( ModelMetaclass as PydanticModelMetaclass, @@ -52,22 +54,6 @@ # Should be a typed subclass of Model. ModelType = TypeVar("ModelType", bound="Model") -PT_INFO = Literal["constraints", "derived_from", "dtype", "unique"] - -_Unset: Any = PydanticUndefinedAnnotation - - -class _PatitoFieldInfo(fields.FieldInfo): - """ - !!! warning - Not intended to be used directly. Specified to facilitate proper type hinting, while preserving customizability of the generated `FieldInfo` objects. - """ - - constraints: pl.Expr | Sequence[pl.Expr] | None = _Unset - derived_from: str | pl.Expr | None = _Unset - dtype: PolarsDataType | None = _Unset - unique: bool | None = _Unset - # A mapping from pydantic types to the equivalent type used in DuckDB PYDANTIC_TO_DUCKDB_TYPES = { @@ -105,7 +91,7 @@ class _PatitoFieldInfo(fields.FieldInfo): ] -class classproperty: +class classproperty: # TODO figure out how to get typing to carry through this decorator """Equivalent to @property, but works on a class (doesn't require an instance). 
https://github.com/pola-rs/polars/blob/8d29d3cebec713363db4ad5d782c74047e24314d/py-polars/polars/datatypes/classes.py#L25C12-L25C12 @@ -136,7 +122,7 @@ class Model(BaseModel, metaclass=ModelMetaclass): """Custom pydantic class for representing table schema and constructing rows.""" if TYPE_CHECKING: - model_fields: ClassVar[Dict[str, _PatitoFieldInfo]] + model_fields: ClassVar[Dict[str, fields.FieldInfo]] model_config = ConfigDict( ignored_types=(classproperty,), @@ -248,14 +234,17 @@ def _valid_dtypes( # noqa: C901 Returns: List of valid dtypes. None if no mapping exists. """ - if "dtype" in props: + column_info = cls.column_infos[column] + if ( + column_info.dtype is not None and "column_info" in props + ): # hack to make sure we only use dtype if we're in the outer scope def dtype_invalid(props: Dict) -> Tuple[bool, List[PolarsDataType]]: if "type" in props: valid_pl_types = cls._pydantic_type_to_valid_polars_types( column, props ) - if props["dtype"] not in valid_pl_types: + if column_info.dtype not in valid_pl_types: return True, valid_pl_types or [] elif "anyOf" in props: for sub_props in props["anyOf"]: @@ -265,17 +254,17 @@ def dtype_invalid(props: Dict) -> Tuple[bool, List[PolarsDataType]]: valid_pl_types = cls._pydantic_type_to_valid_polars_types( column, sub_props ) - if props["dtype"] not in valid_pl_types: + if column_info.dtype not in valid_pl_types: return True, valid_pl_types or [] return False, [] invalid, valid_pl_types = dtype_invalid(props) if invalid: raise ValueError( - f"Invalid dtype {props['dtype']} for column '{column}'. Allowable polars dtypes for {display_as_type(cls.model_fields[column].annotation)} are: {', '.join([str(x) for x in valid_pl_types])}." + f"Invalid dtype {column_info.dtype} for column '{column}'. Allowable polars dtypes for {display_as_type(cls.model_fields[column].annotation)} are: {', '.join([str(x) for x in valid_pl_types])}." 
) return [ - props["dtype"], + column_info.dtype, ] elif "enum" in props and props["type"] == "string": return [pl.Categorical, pl.Utf8] @@ -604,8 +593,8 @@ def unique_columns( # type: ignore >>> sorted(Product.unique_columns) ['barcode', 'product_id'] """ - props = cls._schema_properties() - return {column for column in cls.columns if props[column].get("unique", False)} + infos = cls.column_infos + return {column for column in cls.columns if infos[column].unique} @classproperty def DataFrame( @@ -815,6 +804,7 @@ def example_value( # noqa: C901 """ field_data = cls._schema_properties() properties = field_data[field] + info = cls.column_infos[field] if "type" in properties: field_type = properties["type"] elif "anyOf" in properties: @@ -848,8 +838,8 @@ def example_value( # noqa: C901 upper = properties.get("maximum") or properties.get("exclusiveMaximum") # If the dtype is an unsigned integer type, we must return a positive value - if "dtype" in properties: - dtype = properties["dtype"] + if info.dtype: + dtype = info.dtype if dtype in (pl.UInt8, pl.UInt16, pl.UInt32, pl.UInt64): lower = 0 if lower is None else max(lower, 0) @@ -1399,6 +1389,29 @@ def with_fields( field_mapping=fields, ) + @classmethod + def model_json_schema(cls, *args, **kwargs) -> Dict[str, Any]: + schema = super().model_json_schema(*args, **kwargs) + # for k, v in schema["properties"].items(): + # if 'column_info' in v: + # schema["properties"][k] = {**schema['properties'][k].pop("column_info"), **schema["properties"][k]} + return schema + + @classproperty + def column_infos(cls) -> Dict[str, ColumnInfo]: + fields = cls.model_fields + + def get_column_info(field: fields.FieldInfo) -> ColumnInfo: + if field.json_schema_extra is None: + return ColumnInfo() + elif callable(field.json_schema_extra): + raise NotImplementedError( + "Callable json_schema_extra not supported by patito." 
+ ) + return field.json_schema_extra["column_info"] + + return {k: get_column_info(v) for k, v in fields.items()} + @classmethod def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: """ @@ -1439,11 +1452,11 @@ def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: fields[field_name]["type"] = PYTHON_TO_PYDANTIC_TYPES[ type(field_info["const"]) ] - for f in cls.model_fields[field_name]._attributes_set: - if f not in fields[field_name]: - v = getattr(cls.model_fields[field_name], f, None) - if v is not None: - fields[field_name][f] = v + # for f in cls.model_fields[field_name]._attributes_set: + # if f not in fields[field_name]: + # v = getattr(cls.model_fields[field_name], f, None) + # if v is not None: + # fields[field_name][f] = v return fields @@ -1491,9 +1504,9 @@ def _derive_model( @staticmethod def _derive_field( - field: _PatitoFieldInfo, + field: fields.FieldInfo, make_nullable: bool = False, - ) -> Tuple[Type, _PatitoFieldInfo]: + ) -> Tuple[Type, fields.FieldInfo]: field_type = field.annotation default = field.default extra_attrs = { @@ -1514,98 +1527,91 @@ def _derive_field( # to make it clear that the field is still non-nullable and # required. default = ... 
- field_new = Field(default=default, **extra_attrs) + field_new = fields.Field(default=default, **extra_attrs) field_new.metadata = field.metadata return field_type, field_new -class PatitoFieldExtension(BaseModel): - """Stores type hints and default values for patito-custom field attributes""" +def parse_composite_dtype(dtype: DataTypeClass | DataType) -> str: + if dtype in pl.NESTED_DTYPES: # TODO deprecated, move onto lookup + return f"{convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype.base_type()]}[{parse_composite_dtype(dtype.inner)}]" + elif dtype in pl.DATETIME_DTYPES: + return dtype_str_repr(dtype) + else: + return convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype] - constraints: pl.Expr | Sequence[pl.Expr] | None = _Unset - derived_from: str | pl.Expr | None = _Unset - dtype: PolarsDataType | None = _Unset - unique: bool | None = _Unset - model_config = ConfigDict(arbitrary_types_allowed=True) +def dtype_from_string(v: str): + """for deserialization""" + # TODO test all dtypes + return convert.dtype_short_repr_to_dtype(v) -def field_info(exts: Sequence[Type[BaseModel]]) -> Type[fields.FieldInfo]: - """generates a custom FieldInfo class, with extra attributes specified by the models passed as exts +class ColumnInfo(BaseModel, arbitrary_types_allowed=True): + dtype: DataTypeClass | DataType | None = None # TODO polars migrating onto using instances? 
https://github.com/pola-rs/polars/issues/6163 + constraints: pl.Expr | Sequence[pl.Expr] | None = None + derived_from: str | pl.Expr | None = None + unique: bool | None = None - Parameters - ---------- - exts : Sequence[Type[BaseModel]] - pydantic models describing the extra attributes to be added to the FieldInfo class + # @model_serializer + # def serialize(self) -> Dict[str, Any]: + # return {k: v for k, v in self.__dict__.items() if v is not None} - Returns - ------- - Type[fields.FieldInfo] - a child class of pydantic's `FieldInfo`, with slots and attributes updated to include the extra attributes - """ - ext_fields = reduce( - lambda x, y: x + y, [tuple(x.model_fields.keys()) for x in exts] - ) - - class FieldInfo(fields.FieldInfo): - __slots__ = getattr(fields.FieldInfo, "__slots__") + ext_fields - - def __init__(self, *args, **kwargs): - super().__init__( - *args, **kwargs - ) # processes base fields, popping associated kwargs - self._attributes_set.update( - **{k: v for k, v in kwargs.items() if v is not _Unset} - ) - for f in ext_fields: - self.__setattr__(f, kwargs.pop(f, None)) - - return FieldInfo - - -def field(exts: Sequence[Type[BaseModel]]) -> Callable: - """a `Field` callable factory that accepts extra attributes specified by the models passed as exts + @field_serializer("constraints", "derived_from") + def serialize_exprs(self, exprs: str | pl.Expr | Sequence[pl.Expr] | None) -> Any: + if exprs is None: + return None + elif isinstance(exprs, str): + return exprs + elif isinstance(exprs, pl.Expr): + return self._serialize_expr(exprs) + elif isinstance(exprs, Sequence): + return [self._serialize_expr(c) for c in exprs] + else: + raise ValueError(f"Invalid type for exprs: {type(exprs)}") - Parameters - ---------- - exts : Sequence[Type[BaseModel]] - pydantic models describing the extra attributes to be added to the FieldInfo class + def _serialize_expr(self, expr: pl.Expr) -> Dict: + if isinstance(expr, pl.Expr): + return json.loads( + 
expr.meta.write_json(None) + ) # can we access the dictionary directly? + else: + raise ValueError(f"Invalid type for expr: {type(expr)}") - Returns - ------- - Callable - a custom factory method that splits arguments intended for pydantic's `Field` constructor and the extra attributes - """ - FieldInfo = field_info(exts=exts) + @field_serializer("dtype") + def serialize_dtype(self, dtype: DataTypeClass | DataType | None) -> Any: + """ - def _Field( # noqa: C901 + References + ---------- + [1] https://stackoverflow.com/questions/76572310/how-to-serialize-deserialize-polars-datatypes + """ + if dtype is None: + return None + elif isinstance(dtype, DataTypeClass) or isinstance(dtype, DataType): + return parse_composite_dtype(dtype) + else: + raise ValueError(f"Invalid type for dtype: {type(dtype)}") + + +def Field( + *args, + dtype: DataTypeClass + | DataType + | None = None, # TODO figure out how to make nice signature + constraints: pl.Expr | Sequence[pl.Expr] | None = None, + derived_from: str | pl.Expr | None = None, + unique: bool | None = None, + **kwargs, +) -> Any: + column_info = ColumnInfo( + dtype=dtype, constraints=constraints, derived_from=derived_from, unique=unique + ) + return fields.Field( *args, + json_schema_extra={"column_info": column_info}, **kwargs, - ) -> Any: - meta_kwargs = { - k: v for k, v in kwargs.items() if k in fields.FieldInfo.metadata_lookup - } - base_kwargs = { - k: v - for k, v in kwargs.items() - if k not in meta_kwargs and k in inspect.signature(fields.Field).parameters - } - finfo = fields.Field(*args, **base_kwargs) - new_kwargs = { - k: v - for k, v in kwargs.items() - if k not in meta_kwargs and k not in base_kwargs - } - return FieldInfo( - **finfo._attributes_set, - **meta_kwargs, - **new_kwargs, - ) - - return _Field - - -Field = field(exts=[PatitoFieldExtension]) + ) class FieldDoc: diff --git a/src/patito/validators.py b/src/patito/validators.py index 871e043..41e5be2 100644 --- a/src/patito/validators.py +++ 
b/src/patito/validators.py @@ -189,6 +189,7 @@ def _find_errors( # noqa: C901 valid_dtypes = schema.valid_dtypes dataframe_datatypes = dict(zip(dataframe.columns, dataframe.dtypes)) for column_name, column_properties in schema._schema_properties().items(): + column_info = schema.column_infos[column_name] if column_name not in dataframe.columns: continue @@ -220,7 +221,7 @@ def _find_errors( # noqa: C901 ) ) - if column_properties.get("unique", False): + if column_info.unique: # Coalescing to 0 in the case of dataframe of height 0 num_duplicated = dataframe[column_name].is_duplicated().sum() or 0 if num_duplicated > 0: @@ -278,8 +279,8 @@ def _find_errors( # noqa: C901 ) ) - if "constraints" in column_properties: - custom_constraints = column_properties["constraints"] + if column_info.constraints is not None: + custom_constraints = column_info.constraints if isinstance(custom_constraints, pl.Expr): custom_constraints = [custom_constraints] constraints = pl.all_horizontal( diff --git a/tests/test_model.py b/tests/test_model.py index 5a45e6b..644b61a 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -3,14 +3,14 @@ import enum import re from datetime import date, datetime, timedelta -from typing import List, Optional, Type, Literal, get_args +from typing import List, Optional, Type, Literal import polars as pl import pytest -from pydantic import ValidationError, BaseModel, fields +from pydantic import ValidationError import patito as pt -from patito.pydantic import PL_INTEGER_DTYPES, PT_INFO, field, _Unset +from patito.pydantic import PL_INTEGER_DTYPES def test_model_example(): @@ -427,61 +427,62 @@ class InvalidEnumModel(pt.Model): InvalidEnumModel.sql_types -def test_pt_fields(): - class Model(pt.Model): - a: int - b: int = pt.Field(constraints=[(pl.col("b") < 10)]) - c: int = pt.Field(derived_from=pl.col("a") + pl.col("b")) - d: int = pt.Field(dtype=pl.UInt8) - e: int = pt.Field(unique=True) - - schema = Model.model_json_schema() # no serialization 
issues - props = ( - Model._schema_properties() - ) # extra fields are stored in modified schema_properties - assert "constraints" in props["b"] - assert "derived_from" in props["c"] - assert "dtype" in props["d"] - assert "unique" in props["e"] - - def check_repr(field, set_value: str) -> None: - assert f"{set_value}=" in repr(field) - assert all(x not in repr(field) for x in get_args(PT_INFO) if x != set_value) - - fields = ( - Model.model_fields - ) # attributes are properly set and catalogued on the `FieldInfo` objects - assert "constraints" in fields["b"]._attributes_set - assert fields["b"].constraints is not None - check_repr(fields["b"], "constraints") - assert "derived_from" in fields["c"]._attributes_set - assert fields["c"].derived_from is not None - check_repr(fields["c"], "derived_from") - assert "dtype" in fields["d"]._attributes_set - assert fields["d"].dtype is not None - check_repr(fields["d"], "dtype") - assert "unique" in fields["e"]._attributes_set - assert fields["e"].unique is not None - check_repr(fields["e"], "unique") - - -def test_custom_field_info(): - class FieldExt(BaseModel): - foo: str | None = _Unset - - Field = field(exts=[FieldExt]) - - class Model(pt.Model): - bar: int = Field(foo="hello") - - test_field = Model.model_fields["bar"] - assert ( - test_field.foo == "hello" - ) # TODO passes but typing is unhappy here, can we make custom FieldInfo configurable? 
If users subclass `Model` then it is easy to reset the typing to point at their own `FieldInfo` implementation - assert "foo=" in repr(test_field) - assert "foo" in Model._schema_properties()["bar"] - with pytest.raises(AttributeError): - print(test_field.derived_from) # patito FieldInfo successfully overriden +# TODO new tests for ColumnInfo +# def test_pt_fields(): +# class Model(pt.Model): +# a: int +# b: int = pt.Field(constraints=[(pl.col("b") < 10)]) +# c: int = pt.Field(derived_from=pl.col("a") + pl.col("b")) +# d: int = pt.Field(dtype=pl.UInt8) +# e: int = pt.Field(unique=True) + +# schema = Model.model_json_schema() # no serialization issues +# props = ( +# Model._schema_properties() +# ) # extra fields are stored in modified schema_properties +# assert "constraints" in props["b"] +# assert "derived_from" in props["c"] +# assert "dtype" in props["d"] +# assert "unique" in props["e"] + +# def check_repr(field, set_value: str) -> None: +# assert f"{set_value}=" in repr(field) +# assert all(x not in repr(field) for x in get_args(ColumnInfo.model_fields) if x != set_value) + +# fields = ( +# Model.model_fields +# ) # attributes are properly set and catalogued on the `FieldInfo` objects +# assert "constraints" in fields["b"]._attributes_set +# assert fields["b"].constraints is not None +# check_repr(fields["b"], "constraints") +# assert "derived_from" in fields["c"]._attributes_set +# assert fields["c"].derived_from is not None +# check_repr(fields["c"], "derived_from") +# assert "dtype" in fields["d"]._attributes_set +# assert fields["d"].dtype is not None +# check_repr(fields["d"], "dtype") +# assert "unique" in fields["e"]._attributes_set +# assert fields["e"].unique is not None +# check_repr(fields["e"], "unique") + + +# def test_custom_field_info(): +# class FieldExt(BaseModel): +# foo: str | None = _Unset + +# Field = field(exts=[FieldExt]) + +# class Model(pt.Model): +# bar: int = Field(foo="hello") + +# test_field = Model.model_fields["bar"] +# assert ( 
+# test_field.foo == "hello" +# ) # TODO passes but typing is unhappy here, can we make custom FieldInfo configurable? If users subclass `Model` then it is easy to reset the typing to point at their own `FieldInfo` implementation +# assert "foo=" in repr(test_field) +# assert "foo" in Model._schema_properties()["bar"] +# with pytest.raises(AttributeError): +# print(test_field.derived_from) # patito FieldInfo successfully overriden def test_nullable_columns(): @@ -537,4 +538,4 @@ class Test(pt.Model): assert type(Test.valid_dtypes["date"][0]) == pl.Datetime assert Test.valid_dtypes["date"][0].time_unit == "us" - assert Test.valid_dtypes["date"][0].time_zone == None + assert Test.valid_dtypes["date"][0].time_zone is None diff --git a/tests/test_polars.py b/tests/test_polars.py index c288c0e..2eb094b 100644 --- a/tests/test_polars.py +++ b/tests/test_polars.py @@ -4,6 +4,7 @@ import polars as pl import pytest +from pydantic import ValidationError import patito as pt @@ -223,14 +224,10 @@ class DerivedModel(pt.Model): assert derived_df.frame_equal(correct_derived_df) # Non-compatible derive_from arguments should raise TypeError - class InvalidModel(pt.Model): - incompatible: int = pt.Field(derived_from=object) + with pytest.raises(ValidationError): - with pytest.raises( - TypeError, - match=r"Can not derive dataframe column from type \\.", - ): - InvalidModel.DataFrame().derive() + class InvalidModel(pt.Model): + incompatible: int = pt.Field(derived_from=object) def test_drop_method(): From daf8f5906c8993a8d5cbb902aa282ef1239bb22a Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Mon, 18 Dec 2023 15:44:48 -0500 Subject: [PATCH 20/29] wip: modularize annotation -> dtype and dtype validation working to minimize references to _schema_properties and work with more structured and customizable objects. 
should be easier to add new supported dtypes with this framework and debug in future wip: tests -> new dtype inference engine wip: dtype docstrings --- poetry.lock | 1055 ++++++++++++++-------------- pyproject.toml | 5 +- src/patito/__init__.py | 2 +- src/patito/_pydantic/dtypes.py | 362 ++++++++++ src/patito/_pydantic/repr.py | 12 +- src/patito/exceptions.py | 2 +- src/patito/pydantic.py | 424 ++++------- src/patito/validators.py | 10 +- tests/test_database.py | 3 +- tests/test_dtypes.py | 155 ++++ tests/test_duckdb/test_database.py | 3 +- tests/test_duckdb/test_relation.py | 3 +- tests/test_dummy_data.py | 11 +- tests/test_model.py | 219 ++---- tests/test_polars.py | 21 +- tests/test_validators.py | 35 +- 16 files changed, 1309 insertions(+), 1013 deletions(-) create mode 100644 src/patito/_pydantic/dtypes.py create mode 100644 tests/test_dtypes.py diff --git a/poetry.lock b/poetry.lock index 23d76e5..9c906ab 100644 --- a/poetry.lock +++ b/poetry.lock @@ -95,36 +95,35 @@ Sphinx = ">=2.2,<8.0" [[package]] name = "babel" -version = "2.13.1" +version = "2.14.0" description = "Internationalization utilities" optional = true python-versions = ">=3.7" files = [ - {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, - {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} -setuptools = {version = "*", markers = "python_version >= \"3.12\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bandit" -version = "1.7.5" +version = "1.7.6" description = "Security 
oriented static analyser for python code." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, - {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, + {file = "bandit-1.7.6-py3-none-any.whl", hash = "sha256:36da17c67fc87579a5d20c323c8d0b1643a890a2b93f00b3d1229966624694ff"}, + {file = "bandit-1.7.6.tar.gz", hash = "sha256:72ce7bc9741374d96fb2f1c9a8960829885f1243ffde743de70a19cee353e8f3"}, ] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" +GitPython = ">=3.1.30" PyYAML = ">=5.3.1" rich = "*" stevedore = ">=1.20.0" @@ -154,29 +153,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.10.1" +version = "23.12.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, - {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, - {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, - {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, - {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, - {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, - {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, - {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, - {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, - {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, - {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, - {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, + {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"}, + {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"}, + {file = 
"black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"}, + {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"}, + {file = "black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"}, + {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"}, + {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"}, + {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"}, + {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"}, + {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"}, + {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"}, + {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"}, + {file = "black-23.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcf91b01ddd91a2fed9a8006d7baa94ccefe7e518556470cf40213bd3d44bbbc"}, + {file = "black-23.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:996650a89fe5892714ea4ea87bc45e41a59a1e01675c42c433a35b490e5aa3f0"}, + {file = "black-23.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbff34c487239a63d86db0c9385b27cdd68b1bfa4e706aa74bb94a435403672"}, + {file = "black-23.12.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:97af22278043a6a1272daca10a6f4d36c04dfa77e61cbaaf4482e08f3640e9f0"}, + {file = "black-23.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ead25c273adfad1095a8ad32afdb8304933efba56e3c1d31b0fee4143a1e424a"}, + {file = "black-23.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c71048345bdbced456cddf1622832276d98a710196b842407840ae8055ade6ee"}, + {file = "black-23.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a832b6e00eef2c13b3239d514ea3b7d5cc3eaa03d0474eedcbbda59441ba5d"}, + {file = "black-23.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:6a82a711d13e61840fb11a6dfecc7287f2424f1ca34765e70c909a35ffa7fb95"}, + {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"}, + {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"}, ] [package.dependencies] @@ -190,19 +193,19 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "blackdoc" -version = "0.3.8" +version = "0.3.9" description = "run black on documentation code snippets" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "blackdoc-0.3.8-py3-none-any.whl", hash = "sha256:c003a1b72d57692b343815c8b7a15e78977caba96c86060def892602b5aba083"}, - {file = "blackdoc-0.3.8.tar.gz", hash = "sha256:3c9d5534f92557a627a31550c7faec8363b5b0929bbb0ca3f5df179a81a9d6b2"}, + {file = "blackdoc-0.3.9-py3-none-any.whl", hash = "sha256:2de4ad14993705e6b1fd89b0aa117566ac0c6980f218d6012f22d7cf6f5b2f34"}, + {file = "blackdoc-0.3.9.tar.gz", hash = "sha256:b1da5ca93550081fc3f1b34bfdc731431ee74c378ea63abb31771a222ef5d053"}, ] [package.dependencies] @@ -235,112 +238,112 
@@ redis = ["redis (>=2.10.5)"] [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] name = "charset-normalizer" -version = "3.3.1" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"}, - {file = 
"charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"}, - {file = 
"charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = 
"sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"}, - {file = 
"charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"}, - 
{file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"}, - {file = 
"charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"}, - {file = 
"charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"}, - {file = 
"charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -385,63 +388,63 @@ files = [ [[package]] name = "coverage" -version = "7.3.2" +version = "7.3.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = 
"coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = 
"coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d874434e0cb7b90f7af2b6e3309b0733cde8ec1476eb47db148ed7deeb2a9494"}, + {file = "coverage-7.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6621dccce8af666b8c4651f9f43467bfbf409607c604b840b78f4ff3619aeb"}, + {file = 
"coverage-7.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1367aa411afb4431ab58fd7ee102adb2665894d047c490649e86219327183134"}, + {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f0f8f0c497eb9c9f18f21de0750c8d8b4b9c7000b43996a094290b59d0e7523"}, + {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db0338c4b0951d93d547e0ff8d8ea340fecf5885f5b00b23be5aa99549e14cfd"}, + {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d31650d313bd90d027f4be7663dfa2241079edd780b56ac416b56eebe0a21aab"}, + {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9437a4074b43c177c92c96d051957592afd85ba00d3e92002c8ef45ee75df438"}, + {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17d9cb06c13b4f2ef570355fa45797d10f19ca71395910b249e3f77942a837"}, + {file = "coverage-7.3.3-cp310-cp310-win32.whl", hash = "sha256:eee5e741b43ea1b49d98ab6e40f7e299e97715af2488d1c77a90de4a663a86e2"}, + {file = "coverage-7.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:593efa42160c15c59ee9b66c5f27a453ed3968718e6e58431cdfb2d50d5ad284"}, + {file = "coverage-7.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c944cf1775235c0857829c275c777a2c3e33032e544bcef614036f337ac37bb"}, + {file = "coverage-7.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eda7f6e92358ac9e1717ce1f0377ed2b9320cea070906ece4e5c11d172a45a39"}, + {file = "coverage-7.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c854c1d2c7d3e47f7120b560d1a30c1ca221e207439608d27bc4d08fd4aeae8"}, + {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:222b038f08a7ebed1e4e78ccf3c09a1ca4ac3da16de983e66520973443b546bc"}, + {file = 
"coverage-7.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff4800783d85bff132f2cc7d007426ec698cdce08c3062c8d501ad3f4ea3d16c"}, + {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fc200cec654311ca2c3f5ab3ce2220521b3d4732f68e1b1e79bef8fcfc1f2b97"}, + {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:307aecb65bb77cbfebf2eb6e12009e9034d050c6c69d8a5f3f737b329f4f15fb"}, + {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ffb0eacbadb705c0a6969b0adf468f126b064f3362411df95f6d4f31c40d31c1"}, + {file = "coverage-7.3.3-cp311-cp311-win32.whl", hash = "sha256:79c32f875fd7c0ed8d642b221cf81feba98183d2ff14d1f37a1bbce6b0347d9f"}, + {file = "coverage-7.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:243576944f7c1a1205e5cd658533a50eba662c74f9be4c050d51c69bd4532936"}, + {file = "coverage-7.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a2ac4245f18057dfec3b0074c4eb366953bca6787f1ec397c004c78176a23d56"}, + {file = "coverage-7.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9191be7af41f0b54324ded600e8ddbcabea23e1e8ba419d9a53b241dece821d"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c0b1b8b5a4aebf8fcd227237fc4263aa7fa0ddcd4d288d42f50eff18b0bac4"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee453085279df1bac0996bc97004771a4a052b1f1e23f6101213e3796ff3cb85"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1191270b06ecd68b1d00897b2daddb98e1719f63750969614ceb3438228c088e"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:007a7e49831cfe387473e92e9ff07377f6121120669ddc39674e7244350a6a29"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_i686.whl", 
hash = "sha256:af75cf83c2d57717a8493ed2246d34b1f3398cb8a92b10fd7a1858cad8e78f59"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:811ca7373da32f1ccee2927dc27dc523462fd30674a80102f86c6753d6681bc6"}, + {file = "coverage-7.3.3-cp312-cp312-win32.whl", hash = "sha256:733537a182b5d62184f2a72796eb6901299898231a8e4f84c858c68684b25a70"}, + {file = "coverage-7.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:e995efb191f04b01ced307dbd7407ebf6e6dc209b528d75583277b10fd1800ee"}, + {file = "coverage-7.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbd8a5fe6c893de21a3c6835071ec116d79334fbdf641743332e442a3466f7ea"}, + {file = "coverage-7.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:50c472c1916540f8b2deef10cdc736cd2b3d1464d3945e4da0333862270dcb15"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e9223a18f51d00d3ce239c39fc41410489ec7a248a84fab443fbb39c943616c"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f501e36ac428c1b334c41e196ff6bd550c0353c7314716e80055b1f0a32ba394"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:475de8213ed95a6b6283056d180b2442eee38d5948d735cd3d3b52b86dd65b92"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afdcc10c01d0db217fc0a64f58c7edd635b8f27787fea0a3054b856a6dff8717"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fff0b2f249ac642fd735f009b8363c2b46cf406d3caec00e4deeb79b5ff39b40"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a1f76cfc122c9e0f62dbe0460ec9cc7696fc9a0293931a33b8870f78cf83a327"}, + {file = "coverage-7.3.3-cp38-cp38-win32.whl", hash = "sha256:757453848c18d7ab5d5b5f1827293d580f156f1c2c8cef45bfc21f37d8681069"}, + {file = "coverage-7.3.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:ad2453b852a1316c8a103c9c970db8fbc262f4f6b930aa6c606df9b2766eee06"}, + {file = "coverage-7.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b15e03b8ee6a908db48eccf4e4e42397f146ab1e91c6324da44197a45cb9132"}, + {file = "coverage-7.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:89400aa1752e09f666cc48708eaa171eef0ebe3d5f74044b614729231763ae69"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c59a3e59fb95e6d72e71dc915e6d7fa568863fad0a80b33bc7b82d6e9f844973"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ede881c7618f9cf93e2df0421ee127afdfd267d1b5d0c59bcea771cf160ea4a"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3bfd2c2f0e5384276e12b14882bf2c7621f97c35320c3e7132c156ce18436a1"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f3bad1a9313401ff2964e411ab7d57fb700a2d5478b727e13f156c8f89774a0"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:65d716b736f16e250435473c5ca01285d73c29f20097decdbb12571d5dfb2c94"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a702e66483b1fe602717020a0e90506e759c84a71dbc1616dd55d29d86a9b91f"}, + {file = "coverage-7.3.3-cp39-cp39-win32.whl", hash = "sha256:7fbf3f5756e7955174a31fb579307d69ffca91ad163467ed123858ce0f3fd4aa"}, + {file = "coverage-7.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cad9afc1644b979211989ec3ff7d82110b2ed52995c2f7263e7841c846a75348"}, + {file = "coverage-7.3.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:d299d379b676812e142fb57662a8d0d810b859421412b4d7af996154c00c31bb"}, + {file = "coverage-7.3.3.tar.gz", hash = "sha256:df04c64e58df96b4427db8d0559e95e2df3138c9916c96f9f6a4dd220db2fdb7"}, ] [package.dependencies] @@ -467,13 +470,13 @@ testing = ["cssselect", "importlib-resources", 
"jaraco.test (>=5.1)", "lxml", "p [[package]] name = "dict2css" -version = "0.3.0" +version = "0.3.0.post1" description = "A μ-library for constructing cascading style sheets from Python dictionaries." optional = true python-versions = ">=3.6" files = [ - {file = "dict2css-0.3.0-py3-none-any.whl", hash = "sha256:ef934ce73a225fdd5f811b484fe9e2dd768f7ef14a89fc8f4eb5672597131d00"}, - {file = "dict2css-0.3.0.tar.gz", hash = "sha256:1e8b1bf580dca2083198f88a60ec88c878a8829d760dfe45483ef80fe2905117"}, + {file = "dict2css-0.3.0.post1-py3-none-any.whl", hash = "sha256:f006a6b774c3e31869015122ae82c491fd25e7de4a75607a62aa3e798f837e0d"}, + {file = "dict2css-0.3.0.post1.tar.gz", hash = "sha256:89c544c21c4ca7472c3fffb9d37d3d926f606329afdb751dc1de67a411b70719"}, ] [package.dependencies] @@ -493,13 +496,13 @@ files = [ [[package]] name = "domdf-python-tools" -version = "3.7.0" +version = "3.8.0.post2" description = "Helpful functions for Python 🐍 🛠️" optional = true python-versions = ">=3.6" files = [ - {file = "domdf_python_tools-3.7.0-py3-none-any.whl", hash = "sha256:7b4d1c3bdb7402b872d43953824bf921ae2e52f893adbe5c0052a21a6efa2fe4"}, - {file = "domdf_python_tools-3.7.0.tar.gz", hash = "sha256:df1af9a91649af0fb2a4e7b3a4b0a0936e4f78389dd7280dd6fd2f53a339ca71"}, + {file = "domdf_python_tools-3.8.0.post2-py3-none-any.whl", hash = "sha256:ad2c763c8d00850a7fa92ad95e9891a1918281ea25322c4dbb1734fd32f905dd"}, + {file = "domdf_python_tools-3.8.0.post2.tar.gz", hash = "sha256:a1fd255ea29f767b08de462d2da39d360262304389227d980bc307ee8aa3366a"}, ] [package.dependencies] @@ -513,61 +516,61 @@ dates = ["pytz (>=2019.1)"] [[package]] name = "duckdb" -version = "0.9.1" +version = "0.9.2" description = "DuckDB embedded database" optional = true python-versions = ">=3.7.0" files = [ - {file = "duckdb-0.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6c724e105ecd78c8d86b3c03639b24e1df982392fc836705eb007e4b1b488864"}, - {file = "duckdb-0.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:75f12c5a3086079fb6440122565f1762ef1a610a954f2d8081014c1dd0646e1a"}, - {file = "duckdb-0.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:151f5410c32f8f8fe03bf23462b9604349bc0b4bd3a51049bbf5e6a482a435e8"}, - {file = "duckdb-0.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1d066fdae22b9b711b1603541651a378017645f9fbc4adc9764b2f3c9e9e4a"}, - {file = "duckdb-0.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1de56d8b7bd7a7653428c1bd4b8948316df488626d27e9c388194f2e0d1428d4"}, - {file = "duckdb-0.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1fb6cd590b1bb4e31fde8efd25fedfbfa19a86fa72789fa5b31a71da0d95bce4"}, - {file = "duckdb-0.9.1-cp310-cp310-win32.whl", hash = "sha256:1039e073714d668cef9069bb02c2a6756c7969cedda0bff1332520c4462951c8"}, - {file = "duckdb-0.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:7e6ac4c28918e1d278a89ff26fd528882aa823868ed530df69d6c8a193ae4e41"}, - {file = "duckdb-0.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5eb750f2ee44397a61343f32ee9d9e8c8b5d053fa27ba4185d0e31507157f130"}, - {file = "duckdb-0.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aea2a46881d75dc069a242cb164642d7a4f792889010fb98210953ab7ff48849"}, - {file = "duckdb-0.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed3dcedfc7a9449b6d73f9a2715c730180056e0ba837123e7967be1cd3935081"}, - {file = "duckdb-0.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c55397bed0087ec4445b96f8d55f924680f6d40fbaa7f2e35468c54367214a5"}, - {file = "duckdb-0.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3261696130f1cfb955735647c93297b4a6241753fb0de26c05d96d50986c6347"}, - {file = "duckdb-0.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:64c04b1728e3e37cf93748829b5d1e028227deea75115bb5ead01c608ece44b1"}, - {file = "duckdb-0.9.1-cp311-cp311-win32.whl", hash = 
"sha256:12cf9fb441a32702e31534330a7b4d569083d46a91bf185e0c9415000a978789"}, - {file = "duckdb-0.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:fdfd85575ce9540e593d5d25c9d32050bd636c27786afd7b776aae0f6432b55e"}, - {file = "duckdb-0.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704700a4b469e3bb1a7e85ac12e58037daaf2b555ef64a3fe2913ffef7bd585b"}, - {file = "duckdb-0.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf55b303b7b1a8c2165a96e609eb30484bc47481d94a5fb1e23123e728df0a74"}, - {file = "duckdb-0.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b70e23c14746904ca5de316436e43a685eb769c67fe3dbfaacbd3cce996c5045"}, - {file = "duckdb-0.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:77379f7f1f8b4dc98e01f8f6f8f15a0858cf456e2385e22507f3cb93348a88f9"}, - {file = "duckdb-0.9.1-cp37-cp37m-win32.whl", hash = "sha256:92c8f738489838666cae9ef41703f8b16f660bb146970d1eba8b2c06cb3afa39"}, - {file = "duckdb-0.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08c5484ac06ab714f745526d791141f547e2f5ac92f97a0a1b37dfbb3ea1bd13"}, - {file = "duckdb-0.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f66d3c07c7f6938d3277294677eb7dad75165e7c57c8dd505503fc5ef10f67ad"}, - {file = "duckdb-0.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c38044e5f78c0c7b58e9f937dcc6c34de17e9ca6be42f9f8f1a5a239f7a847a5"}, - {file = "duckdb-0.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73bc0d715b79566b3ede00c367235cfcce67be0eddda06e17665c7a233d6854a"}, - {file = "duckdb-0.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26622c3b4ea6a8328d95882059e3cc646cdc62d267d48d09e55988a3bba0165"}, - {file = "duckdb-0.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3367d10096ff2b7919cedddcf60d308d22d6e53e72ee2702f6e6ca03d361004a"}, - {file = "duckdb-0.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:d88a119f1cb41911a22f08a6f084d061a8c864e28b9433435beb50a56b0d06bb"}, - {file = "duckdb-0.9.1-cp38-cp38-win32.whl", hash = "sha256:99567496e45b55c67427133dc916013e8eb20a811fc7079213f5f03b2a4f5fc0"}, - {file = "duckdb-0.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:5b3da4da73422a3235c3500b3fb541ac546adb3e35642ef1119dbcd9cc7f68b8"}, - {file = "duckdb-0.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eca00c0c2062c0265c6c0e78ca2f6a30611b28f3afef062036610e9fc9d4a67d"}, - {file = "duckdb-0.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eb5af8e89d40fc4baab1515787ea1520a6c6cf6aa40ab9f107df6c3a75686ce1"}, - {file = "duckdb-0.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fae3d4f83ebcb47995f6acad7c6d57d003a9b6f0e1b31f79a3edd6feb377443"}, - {file = "duckdb-0.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16b9a7efc745bc3c5d1018c3a2f58d9e6ce49c0446819a9600fdba5f78e54c47"}, - {file = "duckdb-0.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b0b60167f5537772e9f5af940e69dcf50e66f5247732b8bb84a493a9af6055"}, - {file = "duckdb-0.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4f27f5e94c47df6c4ccddf18e3277b7464eea3db07356d2c4bf033b5c88359b8"}, - {file = "duckdb-0.9.1-cp39-cp39-win32.whl", hash = "sha256:d43cd7e6f783006b59dcc5e40fcf157d21ee3d0c8dfced35278091209e9974d7"}, - {file = "duckdb-0.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:e666795887d9cf1d6b6f6cbb9d487270680e5ff6205ebc54b2308151f13b8cff"}, - {file = "duckdb-0.9.1.tar.gz", hash = "sha256:603a878746015a3f2363a65eb48bcbec816261b6ee8d71eee53061117f6eef9d"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"}, + {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"}, + {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"}, + {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"}, + {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"}, + {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"}, + {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"}, + {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"}, + {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"}, + {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"}, + {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"}, + {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"}, + {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"}, + {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"}, + {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"}, + {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"}, + {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"}, + {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"}, + {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"}, + {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"}, + {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"}, 
+ {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"}, + {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"}, + {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"}, + {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"}, + {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"}, + {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"}, + {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"}, ] [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + 
{file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -575,19 +578,19 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.12.4" +version = "3.13.1" description = "A platform independent file lock." optional = true python-versions = ">=3.8" files = [ - {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, - {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] -typing = ["typing-extensions (>=4.7.1)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" @@ -676,12 +679,13 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", [[package]] name = "flake8-isort" -version = "6.1.0" -description = "flake8 plugin that integrates isort ." 
+version = "6.1.1" +description = "flake8 plugin that integrates isort" optional = false python-versions = ">=3.8" files = [ - {file = "flake8-isort-6.1.0.tar.gz", hash = "sha256:d4639343bac540194c59fb1618ac2c285b3e27609f353bef6f50904d40c1643e"}, + {file = "flake8_isort-6.1.1-py3-none-any.whl", hash = "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"}, + {file = "flake8_isort-6.1.1.tar.gz", hash = "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"}, ] [package.dependencies] @@ -759,13 +763,13 @@ lxml = ["lxml"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] @@ -781,20 +785,20 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "7.0.0" description = "Read metadata from Python packages" optional = true python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, + {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, ] [package.dependencies] 
zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] @@ -811,20 +815,17 @@ files = [ [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" @@ -1041,38 +1042,38 @@ files = [ [[package]] name = "mypy" -version = "1.6.1" +version = "1.7.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, - {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, - {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, - {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, - {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, - {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, - {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, - {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, - {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, - {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, - {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, - {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, - {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, - {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, - {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, - {file = 
"mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, - {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, - {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, - {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, - {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, - {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, - {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, - {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, - {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, - {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, - {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, - {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, + {file = 
"mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, + {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, + {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, + {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, + {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, + {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, + {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, + {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, + {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, + {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, + {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, + {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, + {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, + {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, + {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, + {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, + {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, ] [package.dependencies] @@ -1083,6 +1084,7 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -1201,36 +1203,47 @@ files = [ [[package]] name = "numpy" -version = "1.25.2" +version = "1.26.2" description = "Fundamental package for array computing in Python" optional = false 
python-versions = ">=3.9" files = [ - {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, - {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, - {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, - {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, - {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, - {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, - {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, - {file = 
"numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, - {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, - {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, - {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, - {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, + {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, + {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, + {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = 
"sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, + {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, + {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, + {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, + {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, + {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, + {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, ] [[package]] @@ -1331,35 +1344,35 @@ types-pytz = ">=2022.1.1" [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "pbr" -version = "5.11.1" +version = "6.0.0" description = "Python Build Reasonableness" optional = false python-versions = ">=2.6" files = [ - {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, - {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, ] [[package]] name = "platformdirs" -version = "3.11.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -1383,25 +1396,25 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "polars" -version = "0.19.11" +version = "0.20.0" description = "Blazingly fast DataFrame library" optional = false python-versions = ">=3.8" files = [ - {file = "polars-0.19.11-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:de8158e5f09346ec4622057b7afa7e5339eed61c3c3e874b469c9cb27339df51"}, - {file = "polars-0.19.11-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c76c2107260a1ca8a57f02d77ea12dc4db2090d7404b814570474db0392ecf6b"}, - {file = "polars-0.19.11-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c6cf2aa2d301230a80277f8711646453b89eadd6058baf30b7104f420daad2"}, - {file = "polars-0.19.11-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ac2890c96736dee83335b1f0b403233aba18b86760505e81eb9f96112afc55d"}, - {file = "polars-0.19.11-cp38-abi3-win_amd64.whl", hash = "sha256:95be83cb0bbd2d608849e24a973ea3135bd25ae6ce7168e31ad25a02e7773122"}, - {file = "polars-0.19.11.tar.gz", hash = "sha256:156eab31d9f9bac218bbd391559c667848372a5c584472784695e4fac087fd5b"}, + {file = "polars-0.20.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:813707e28bdbdf00a849a72d856cad48a4063a1e7f4487bc7068f957988107ce"}, + {file = "polars-0.20.0-cp38-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:7504c63e9783de6cec289f89383b797a5e1fd72c10abc137bba7ee022d34f193"}, + {file = "polars-0.20.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e01beb3e3ad2c3efff209fafd5eff785489d1a6fb196aee2e29237d969389255"}, + {file = "polars-0.20.0-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:1d78313386fb46ab516eebb521c8612a9acb86c9972af195e771f468c0e5e77d"}, + {file = "polars-0.20.0-cp38-abi3-win_amd64.whl", hash = "sha256:8b8856c7368b605b77e9cf231340ab39a4af86a1dafddde92a2b913c0c47007e"}, + {file = "polars-0.20.0.tar.gz", hash = "sha256:cc70f7e4f195b438de54f51d411fa831d032d8528fc01fea186c8784f06d0f04"}, ] [package.extras] adbc = ["adbc_driver_sqlite"] all = ["polars[adbc,cloudpickle,connectorx,deltalake,fsspec,gevent,matplotlib,numpy,pandas,pyarrow,pydantic,pyiceberg,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] cloudpickle = ["cloudpickle"] -connectorx = ["connectorx"] -deltalake = ["deltalake (>=0.10.0)"] +connectorx = ["connectorx (>=0.3.2)"] +deltalake = ["deltalake (>=0.14.0)"] fsspec = ["fsspec"] gevent = ["gevent"] matplotlib = ["matplotlib"] @@ -1419,40 +1432,47 @@ xlsxwriter = ["xlsxwriter"] [[package]] name = "pyarrow" -version = "13.0.0" +version = "14.0.1" description = "Python library for Apache Arrow" optional = true python-versions = ">=3.8" files = [ - {file = "pyarrow-13.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:1afcc2c33f31f6fb25c92d50a86b7a9f076d38acbcb6f9e74349636109550148"}, - {file = "pyarrow-13.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:70fa38cdc66b2fc1349a082987f2b499d51d072faaa6b600f71931150de2e0e3"}, - {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd57b13a6466822498238877892a9b287b0a58c2e81e4bdb0b596dbb151cbb73"}, - {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ce69f7bf01de2e2764e14df45b8404fc6f1a5ed9871e8e08a12169f87b7a26"}, - {file = 
"pyarrow-13.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:588f0d2da6cf1b1680974d63be09a6530fd1bd825dc87f76e162404779a157dc"}, - {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6241afd72b628787b4abea39e238e3ff9f34165273fad306c7acf780dd850956"}, - {file = "pyarrow-13.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:fda7857e35993673fcda603c07d43889fca60a5b254052a462653f8656c64f44"}, - {file = "pyarrow-13.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:aac0ae0146a9bfa5e12d87dda89d9ef7c57a96210b899459fc2f785303dcbb67"}, - {file = "pyarrow-13.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7759994217c86c161c6a8060509cfdf782b952163569606bb373828afdd82e8"}, - {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868a073fd0ff6468ae7d869b5fc1f54de5c4255b37f44fb890385eb68b68f95d"}, - {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51be67e29f3cfcde263a113c28e96aa04362ed8229cb7c6e5f5c719003659d33"}, - {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d1b4e7176443d12610874bb84d0060bf080f000ea9ed7c84b2801df851320295"}, - {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:69b6f9a089d116a82c3ed819eea8fe67dae6105f0d81eaf0fdd5e60d0c6e0944"}, - {file = "pyarrow-13.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ab1268db81aeb241200e321e220e7cd769762f386f92f61b898352dd27e402ce"}, - {file = "pyarrow-13.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ee7490f0f3f16a6c38f8c680949551053c8194e68de5046e6c288e396dccee80"}, - {file = "pyarrow-13.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3ad79455c197a36eefbd90ad4aa832bece7f830a64396c15c61a0985e337287"}, - {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68fcd2dc1b7d9310b29a15949cdd0cb9bc34b6de767aff979ebf546020bf0ba0"}, - {file = 
"pyarrow-13.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc6fd330fd574c51d10638e63c0d00ab456498fc804c9d01f2a61b9264f2c5b2"}, - {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e66442e084979a97bb66939e18f7b8709e4ac5f887e636aba29486ffbf373763"}, - {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:0f6eff839a9e40e9c5610d3ff8c5bdd2f10303408312caf4c8003285d0b49565"}, - {file = "pyarrow-13.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b30a27f1cddf5c6efcb67e598d7823a1e253d743d92ac32ec1eb4b6a1417867"}, - {file = "pyarrow-13.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:09552dad5cf3de2dc0aba1c7c4b470754c69bd821f5faafc3d774bedc3b04bb7"}, - {file = "pyarrow-13.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3896ae6c205d73ad192d2fc1489cd0edfab9f12867c85b4c277af4d37383c18c"}, - {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6647444b21cb5e68b593b970b2a9a07748dd74ea457c7dadaa15fd469c48ada1"}, - {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47663efc9c395e31d09c6aacfa860f4473815ad6804311c5433f7085415d62a7"}, - {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b9ba6b6d34bd2563345488cf444510588ea42ad5613df3b3509f48eb80250afd"}, - {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d00d374a5625beeb448a7fa23060df79adb596074beb3ddc1838adb647b6ef09"}, - {file = "pyarrow-13.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c51afd87c35c8331b56f796eff954b9c7f8d4b7fef5903daf4e05fcf017d23a8"}, - {file = "pyarrow-13.0.0.tar.gz", hash = "sha256:83333726e83ed44b0ac94d8d7a21bbdee4a05029c3b1e8db58a863eec8fd8a33"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, + {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, + {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, + {file = "pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, + {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = 
"sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, + {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"}, + {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"}, + {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, + {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, ] [package.dependencies] @@ -1471,18 +1491,18 @@ files = [ [[package]] name = "pydantic" -version = "2.4.2" +version = "2.5.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, - {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] 
[package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.10.1" +pydantic-core = "2.14.5" typing-extensions = ">=4.6.1" [package.extras] @@ -1490,117 +1510,116 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.10.1" +version = "2.14.5" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, - {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, - {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, - {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, - {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, - {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, - {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, - {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, - {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, - 
{file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, - {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, - {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, - {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, - {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, - {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, - {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, - 
{file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, - {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, - {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, - {file = 
"pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, - {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = 
"pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = 
"sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = 
"sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = 
"sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + 
{file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash 
= "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = 
"pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = 
"pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, ] [package.dependencies] @@ -1619,27 +1638,28 @@ files = [ [[package]] name = "pygments" -version = "2.16.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyright" -version = "1.1.333" +version = "1.1.341" description = "Command line wrapper for pyright" optional = false python-versions = ">=3.7" files = [ - {file = "pyright-1.1.333-py3-none-any.whl", hash = "sha256:f0a7b7b0cac11c396b17ef3cf6c8527aca1269edaf5cf8203eed7d6dd1ef52aa"}, - {file = "pyright-1.1.333.tar.gz", hash = "sha256:1c49b0029048120c4378f3baf6c1dcbbfb221678bb69654fe773c514430ac53c"}, + {file = "pyright-1.1.341-py3-none-any.whl", hash = "sha256:f5800daf9d5780ebf6c6e04064a6d20da99c0ef16efd77526f83cc8d8551ff9f"}, + {file = "pyright-1.1.341.tar.gz", hash = "sha256:b891721f3abd10635cc4fd3076bcff5b7676567dc3a629997ed59a0d30034a87"}, ] [package.dependencies] @@ -1811,13 +1831,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.6.0" +version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, - {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, + {file = "rich-13.7.0-py3-none-any.whl", hash = 
"sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, ] [package.dependencies] @@ -1830,13 +1850,13 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruamel-yaml" -version = "0.18.2" +version = "0.18.5" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = true -python-versions = ">=3" +python-versions = ">=3.7" files = [ - {file = "ruamel.yaml-0.18.2-py3-none-any.whl", hash = "sha256:92076ac8a83dbf44ca661dbed3c935229c8cbc2f10b05959dd3bd5292d8353d3"}, - {file = "ruamel.yaml-0.18.2.tar.gz", hash = "sha256:9bce33f7a814cea4c29a9c62fe872d2363d6220b767891d956eacea8fa5e6fe8"}, + {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, + {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, ] [package.dependencies] @@ -1893,17 +1913,17 @@ files = [ [[package]] name = "setuptools" -version = "68.2.2" +version = "69.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker 
(>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] @@ -2026,13 +2046,13 @@ type-comment = ["typed-ast (>=1.5.4)"] [[package]] name = "sphinx-jinja2-compat" -version = "0.2.0" +version = "0.2.0.post1" description = "Patches Jinja2 v3 to restore compatibility with earlier Sphinx versions." 
optional = true python-versions = ">=3.6" files = [ - {file = "sphinx_jinja2_compat-0.2.0-py3-none-any.whl", hash = "sha256:a5f3112d6873991c2cf28e37287163a0485d9c0812863b8aa4df7182722501fb"}, - {file = "sphinx_jinja2_compat-0.2.0.tar.gz", hash = "sha256:c41346d859653e202b623f4236da8936243ed734abf5984adc3bef59d6f9a946"}, + {file = "sphinx_jinja2_compat-0.2.0.post1-py3-none-any.whl", hash = "sha256:f9d329174bdde8db19dc12c62528367196eb2f6b46c91754eca604acd0c0f6ad"}, + {file = "sphinx_jinja2_compat-0.2.0.post1.tar.gz", hash = "sha256:974289a12a9f402108dead621e9c15f7004e945d5cfcaea8d6419e94d3fa95a3"}, ] [package.dependencies] @@ -2055,18 +2075,18 @@ Sphinx = "*" [[package]] name = "sphinx-rtd-theme" -version = "1.3.0" +version = "2.0.0" description = "Read the Docs theme for Sphinx" optional = true -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.6" files = [ - {file = "sphinx_rtd_theme-1.3.0-py2.py3-none-any.whl", hash = "sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0"}, - {file = "sphinx_rtd_theme-1.3.0.tar.gz", hash = "sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931"}, + {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, + {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, ] [package.dependencies] -docutils = "<0.19" -sphinx = ">=1.6,<8" +docutils = "<0.21" +sphinx = ">=5,<8" sphinxcontrib-jquery = ">=4,<5" [package.extras] @@ -2281,22 +2301,22 @@ files = [ [[package]] name = "tornado" -version = "6.3.3" +version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = true python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, - {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, - {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, - {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] [[package]] @@ -2312,24 +2332,24 @@ files = [ [[package]] name = "types-setuptools" -version = "68.2.0.0" +version = "69.0.0.0" description = "Typing stubs for setuptools" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-setuptools-68.2.0.0.tar.gz", hash = "sha256:a4216f1e2ef29d089877b3af3ab2acf489eb869ccaf905125c69d2dc3932fd85"}, - {file = "types_setuptools-68.2.0.0-py3-none-any.whl", hash = "sha256:77edcc843e53f8fc83bb1a840684841f3dc804ec94562623bfa2ea70d5a2ba1b"}, + {file = 
"types-setuptools-69.0.0.0.tar.gz", hash = "sha256:b0a06219f628c6527b2f8ce770a4f47550e00d3e8c3ad83e2dc31bc6e6eda95d"}, + {file = "types_setuptools-69.0.0.0-py3-none-any.whl", hash = "sha256:8c86195bae2ad81e6dea900a570fe9d64a59dbce2b11cc63c046b03246ea77bf"}, ] [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -2345,18 +2365,17 @@ files = [ [[package]] name = "urllib3" -version = "2.0.7" +version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, - {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2457,4 +2476,4 @@ pandas = ["pandas"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "da489708a531a87851eba1b1cebd18e12115acc0cf2e41bf56b123d0d2928a55" +content-hash = "12b075a3eab1fa43746724a0a52aed015c79c18cc4c53c02002e1cef665e9b83" diff --git a/pyproject.toml b/pyproject.toml index c0b4261..3716242 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ keywords = ["validation", "dataframe"] [tool.poetry.dependencies] python = "^3.8" pydantic = ">=2.0.0" -polars = ">=0.19.0" +polars = ">=0.20.0" # Required for typing.Literal in python3.7 typing-extensions = "*" pandas = {version = "*", optional = true, python = "^3.8"} @@ -134,3 +134,6 @@ disable_error_code = [ [[tool.mypy.overrides]] module = ["tests.test_validators"] warn_unused_ignores = false + +[tool.ruff.lint] +select = ["I"] \ No newline at end of file diff --git a/src/patito/__init__.py b/src/patito/__init__.py index 1ee3f5c..e29245b 100644 --- a/src/patito/__init__.py +++ b/src/patito/__init__.py @@ -2,9 +2,9 @@ from polars import Expr, Series, col from patito import exceptions, sql +from patito.exceptions import DataFrameValidationError from 
patito.polars import DataFrame, LazyFrame from patito.pydantic import Field, Model -from patito.exceptions import DataFrameValidationError _CACHING_AVAILABLE = False _DUCKDB_AVAILABLE = False diff --git a/src/patito/_pydantic/dtypes.py b/src/patito/_pydantic/dtypes.py new file mode 100644 index 0000000..d806796 --- /dev/null +++ b/src/patito/_pydantic/dtypes.py @@ -0,0 +1,362 @@ +import itertools +from enum import Enum +from typing import ( + Any, + Dict, + FrozenSet, + List, + Literal, + Optional, + Sequence, + Tuple, + cast, + get_args, +) + +import polars as pl +from polars.datatypes import DataType, DataTypeClass, DataTypeGroup, convert +from polars.datatypes.constants import ( + DATETIME_DTYPES, + DURATION_DTYPES, + FLOAT_DTYPES, + INTEGER_DTYPES, +) +from polars.polars import ( + dtype_str_repr, # TODO: this is a rust function, can we implement our own string parser for Time/Duration/Datetime? +) +from pydantic import TypeAdapter + +from patito._pydantic.repr import display_as_type + +PYTHON_TO_PYDANTIC_TYPES = { + str: "string", + int: "integer", + float: "number", + bool: "boolean", + type(None): "null", +} + +BOOLEAN_DTYPES = DataTypeGroup([pl.Boolean]) +STRING_DTYPES = DataTypeGroup([pl.Utf8]) +DATE_DTYPES = DataTypeGroup([pl.Date]) +TIME_DTYPES = DataTypeGroup([pl.Time]) + +PT_BASE_SUPPORTED_DTYPES = DataTypeGroup( + INTEGER_DTYPES + | FLOAT_DTYPES + | BOOLEAN_DTYPES + | STRING_DTYPES + | DATE_DTYPES + | DATETIME_DTYPES + | DURATION_DTYPES + | TIME_DTYPES +) + + +class PydanticBaseType(Enum): + STRING = "string" + INTEGER = "integer" + NUMBER = "number" + BOOLEAN = "boolean" + NULL = "null" + OBJECT = "object" + + +class PydanticStringFormat(Enum): + DATE = "date" + DATE_TIME = "date-time" + DURATION = "duration" + TIME = "time" + + +def parse_composite_dtype(dtype: DataTypeClass | DataType) -> str: + """for serialization, converts polars dtype to string representation + """ + if dtype in pl.NESTED_DTYPES: + if dtype == pl.Struct or isinstance(dtype, 
pl.Struct): + raise NotImplementedError("Structs not yet supported by patito") + if not isinstance(dtype, pl.List) or isinstance(dtype, pl.Array): + raise NotImplementedError( + f"Unsupported nested dtype: {dtype} of type {type(dtype)}" + ) + if dtype.inner is None: + return convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype.base_type()] + return f"{convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype.base_type()]}[{parse_composite_dtype(dtype.inner)}]" + elif dtype in pl.TEMPORAL_DTYPES: + return dtype_str_repr(dtype) + else: + return convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype] + + +def dtype_from_string(v: str): + """for deserialization""" + # TODO test all dtypes + return convert.dtype_short_repr_to_dtype(v) + + +def validate_polars_dtype( + annotation: type[Any] | None, + dtype: DataType | DataTypeClass | None, + column: Optional[str] = None, +): + """ + Check that the polars dtype is valid for the given annotation. Raises ValueError if not. + + Args: + annotation (type[Any] | None): python type annotation + dtype (DataType | DataTypeClass | None): polars dtype + column (Optional[str], optional): column name. Defaults to None. + """ + if ( + dtype is None or annotation is None + ): # no potential conflict between type annotation and chosen polars type + return + valid_dtypes = valid_polars_dtypes_for_annotation(annotation) + if dtype not in valid_dtypes: + if column: + column_msg = f" for column `{column}`" + else: + column_msg = "" + raise ValueError( + f"Invalid dtype {dtype}{column_msg}. Allowable polars dtypes for {display_as_type(annotation)} are: {', '.join([str(x) for x in valid_dtypes])}." + ) + return + + +def validate_annotation(annotation: type[Any] | None, column: Optional[str] = None): + """ + Check that the provided annotation has polars/patito support (we can resolve it to a default dtype). Raises ValueError if not. + + Args: + annotation (type[Any] | None): python type annotation + column (Optional[str], optional): column name. Defaults to None. 
+ """ + default_dtype = default_polars_dtype_for_annotation(annotation) + if default_dtype is None: + valid_polars_dtypes = valid_polars_dtypes_for_annotation(annotation) + if column: + column_msg = f" for column `{column}`" + else: + column_msg = "" + if len(valid_polars_dtypes) == 0: + raise ValueError( + f"Annotation {display_as_type(annotation)}{column_msg} is not compatible with any polars dtypes." + ) + else: + raise ValueError( + f"Unable to determine default dtype for annotation {display_as_type(annotation)}{column_msg}. Please provide a valid default polars dtype via the `dtype` argument to `Field`. Valid dtypes are: {', '.join([str(x) for x in valid_polars_dtypes])}." + ) + return + + +def valid_polars_dtypes_for_annotation( + annotation: type[Any] | None +) -> FrozenSet[DataTypeClass | DataType]: + """Returns a set of polars types that are valid for the given annotation. If the annotation is Any, returns all supported polars dtypes. + + Args: + annotation (type[Any] | None): python type annotation + + Returns: + FrozenSet[DataTypeClass | DataType]: set of polars dtypes + """ + if annotation == Any: + return PT_BASE_SUPPORTED_DTYPES + schema = TypeAdapter(annotation).json_schema() + return _valid_polars_dtypes_for_schema(schema) + + +def default_polars_dtype_for_annotation( + annotation: type[Any] | None +) -> DataTypeClass | DataType | None: + """Returns the default polars dtype for the given annotation. If the annotation is Any, returns pl.Utf8. If no default dtype can be determined, returns None. 
+ + Args: + annotation (type[Any] | None): python type annotation + + Returns: + DataTypeClass | DataType | None: polars dtype + """ + if annotation == Any: + return pl.Utf8 + schema = TypeAdapter(annotation).json_schema() + return _default_polars_dtype_for_schema(schema) + + +def _valid_polars_dtypes_for_schema( + schema: Dict +) -> FrozenSet[DataTypeClass | DataType]: + valid_type_sets = [] + if "anyOf" in schema: + schema = _without_optional(schema) + for sub_props in schema["anyOf"]: + valid_type_sets.append( + set(_pydantic_subschema_to_valid_polars_types(sub_props)) + ) + else: + valid_type_sets.append(set(_pydantic_subschema_to_valid_polars_types(schema))) + return set.intersection(*valid_type_sets) if valid_type_sets else frozenset() # pyright: ignore + + +def _default_polars_dtype_for_schema(schema: Dict) -> DataTypeClass | DataType | None: + if "anyOf" in schema: + if len(schema["anyOf"]) == 2: # look for optionals first + schema = _without_optional(schema) + if len(schema["anyOf"]) == 1: + schema = schema["anyOf"][0] + else: + return None + return _pydantic_subschema_to_default_dtype(schema) + + +def _without_optional(schema: Dict) -> Dict: + if "anyOf" in schema: + for sub_props in schema["anyOf"]: + if "type" in sub_props and sub_props["type"] == "null": + schema["anyOf"].remove(sub_props) + return schema + + +def _pydantic_subschema_to_valid_polars_types( + props: Dict, +) -> FrozenSet[DataTypeClass | DataType]: + if "type" not in props: + if "enum" in props: + raise TypeError("Mixed type enums not supported by patito.") + elif "const" in props: + return valid_polars_dtypes_for_annotation(type(props["const"])) + return frozenset() + pyd_type = props.get("type") + if pyd_type == "array": + if "items" not in props: + raise NotImplementedError( + "Unexpected error processing pydantic schema. Please file an issue." + ) + array_props = props["items"] + item_dtypes = _valid_polars_dtypes_for_schema(array_props) + # TODO support pl.Array? 
+ return DataTypeGroup([pl.List(dtype) for dtype in item_dtypes]) + return _pyd_type_to_valid_dtypes( + PydanticBaseType(pyd_type), props.get("format"), props.get("enum") + ) + + +def _pydantic_subschema_to_default_dtype( + props: Dict +) -> DataTypeClass | DataType | None: + if "type" not in props: + if "enum" in props: + raise TypeError("Mixed type enums not supported by patito.") + elif "const" in props: + return default_polars_dtype_for_annotation(type(props["const"])) + return None + pyd_type = props.get("type") + if pyd_type == "array": + if "items" not in props: + raise NotImplementedError( + "Unexpected error processing pydantic schema. Please file an issue." + ) + array_props = props["items"] + inner_default_type = _default_polars_dtype_for_schema(array_props) + if inner_default_type is None: + return None + return pl.List(inner_default_type) + return _pyd_type_to_default_dtype( + PydanticBaseType(pyd_type), props.get("format"), props.get("enum") + ) + + +def _pyd_type_to_valid_dtypes( + pyd_type: PydanticBaseType, string_format: Optional[str], enum: List[str] | None +) -> FrozenSet[DataTypeClass | DataType]: + if enum is not None: + _validate_enum_values(pyd_type, enum) + return DataTypeGroup( + [pl.Categorical, pl.Utf8] + ) # TODO use pl.Enum in future polars versions + if pyd_type.value == "integer": + return DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) + elif pyd_type.value == "number": + return FLOAT_DTYPES + elif pyd_type.value == "boolean": + return BOOLEAN_DTYPES + elif pyd_type.value == "string": + _string_format = ( + PydanticStringFormat(string_format) if string_format is not None else None + ) + return _pyd_string_format_to_valid_dtypes(_string_format) + elif pyd_type.value == "null": + return frozenset({pl.Null}) + else: + return frozenset() + + +def _pyd_type_to_default_dtype( + pyd_type: PydanticBaseType, string_format: Optional[str], enum: List[str] | None +) -> DataTypeClass | DataType: + if enum is not None: + 
_validate_enum_values(pyd_type, enum) + return pl.Categorical + elif pyd_type.value == "integer": + return pl.Int64 + elif pyd_type.value == "number": + return pl.Float64 + elif pyd_type.value == "boolean": + return pl.Boolean + elif pyd_type.value == "string": + _string_format = ( + PydanticStringFormat(string_format) if string_format is not None else None + ) + return _pyd_string_format_to_default_dtype(_string_format) + elif pyd_type.value == "null": + return pl.Null + elif pyd_type.value == "object": + raise ValueError("pydantic object types not currently supported by patito") + else: + raise NotImplementedError + + +def _pyd_string_format_to_valid_dtypes( + string_format: PydanticStringFormat | None +) -> FrozenSet[DataTypeClass | DataType]: + if string_format is None: + return STRING_DTYPES + elif string_format.value == "date": + return DATE_DTYPES + elif string_format.value == "date-time": + return DATETIME_DTYPES + elif string_format.value == "duration": + return DURATION_DTYPES + elif string_format.value == "time": + return TIME_DTYPES + else: + raise NotImplementedError + + +def _pyd_string_format_to_default_dtype( + string_format: PydanticStringFormat | None +) -> DataTypeClass | DataType: + if string_format is None: + return pl.Utf8 + elif string_format.value == "date": + return pl.Date + elif string_format.value == "date-time": + return pl.Datetime + elif string_format.value == "duration": + return pl.Duration + elif string_format.value == "time": + return pl.Time + else: + raise NotImplementedError + + +def _validate_enum_values(pyd_type: PydanticBaseType, enum: Sequence): + enum_types = set(type(value) for value in enum) + if len(enum_types) > 1: + raise TypeError( + f"All enumerated values of enums used to annotate Patito model fields must have the same type. Encountered types: {sorted(map(lambda t: t.__name__, enum_types))}." + ) + if pyd_type.value != "string": + raise TypeError( + f"Enums used to annotate Patito model fields must be strings. 
Encountered type: {enum_types.pop().__name__}." + ) diff --git a/src/patito/_pydantic/repr.py b/src/patito/_pydantic/repr.py index 35f2777..f245b97 100644 --- a/src/patito/_pydantic/repr.py +++ b/src/patito/_pydantic/repr.py @@ -1,19 +1,19 @@ +import sys import types import typing from typing import ( Any, - Tuple, Callable, Generator, - Union, - Sequence, Iterable, + Literal, Optional, - get_origin, + Sequence, + Tuple, + Union, get_args, - Literal, + get_origin, ) -import sys if typing.TYPE_CHECKING: Loc = Tuple[Union[int, str], ...] diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index 33b3ca6..893ed66 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -8,8 +8,8 @@ Sequence, Tuple, Type, - Union, TypedDict, + Union, ) from patito._pydantic.repr import Representation diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 1f3eb19..44120c7 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -2,42 +2,53 @@ from __future__ import annotations import itertools +import json from collections.abc import Iterable from datetime import date, datetime +from functools import cached_property from typing import ( TYPE_CHECKING, Any, ClassVar, Dict, + FrozenSet, List, + Literal, Optional, + Sequence, + Tuple, Type, TypeVar, Union, cast, - Literal, get_args, - Sequence, - Tuple, - Callable, ) -import json import polars as pl -from polars.polars import dtype_str_repr -from polars.datatypes import PolarsDataType, convert, DataTypeClass, DataType -from pydantic import ( - fields, +from polars.datatypes import DataType, DataTypeClass, PolarsDataType, convert +from pydantic import ( # noqa: F401 + BaseModel, + ConfigDict, + create_model, field_serializer, + fields, + JsonDict, ) -from pydantic import ConfigDict, BaseModel, create_model # noqa: F401 from pydantic._internal._model_construction import ( ModelMetaclass as PydanticModelMetaclass, ) +from patito._pydantic.dtypes import ( + default_polars_dtype_for_annotation, 
+ dtype_from_string, + parse_composite_dtype, + valid_polars_dtypes_for_annotation, + validate_annotation, + validate_polars_dtype, +) +from patito._pydantic.repr import display_as_type from patito.polars import DataFrame, LazyFrame from patito.validators import validate -from patito._pydantic.repr import display_as_type try: import pandas as pd @@ -63,14 +74,6 @@ "boolean": "BOOLEAN", } -# A mapping from pydantic types to equivalent dtypes used in polars -PYDANTIC_TO_POLARS_TYPES = { - "integer": pl.Int64, - "string": pl.Utf8, - "number": pl.Float64, - "boolean": pl.Boolean, -} - PYTHON_TO_PYDANTIC_TYPES = { str: "string", int: "integer", @@ -79,34 +82,6 @@ type(None): "null", } -PL_INTEGER_DTYPES = [ - pl.Int64, - pl.Int32, - pl.Int16, - pl.Int8, - pl.UInt64, - pl.UInt32, - pl.UInt16, - pl.UInt8, -] - - -class classproperty: # TODO figure out how to get typing to carry through this decorator - """Equivalent to @property, but works on a class (doesn't require an instance). - - https://github.com/pola-rs/polars/blob/8d29d3cebec713363db4ad5d782c74047e24314d/py-polars/polars/datatypes/classes.py#L25C12-L25C12 - """ - - def __init__(self, method: Callable[..., Any] | None = None) -> None: - self.fget = method - - def __get__(self, instance: Any, cls: type | None = None) -> Any: - return self.fget(cls) # type: ignore[misc] - - def getter(self, method: Callable[..., Any]) -> Any: # noqa: D102 - self.fget = method - return self - class ModelMetaclass(PydanticModelMetaclass): """ @@ -115,21 +90,65 @@ class ModelMetaclass(PydanticModelMetaclass): Responsible for setting any relevant model-dependent class properties. """ - ... + if TYPE_CHECKING: + model_fields: ClassVar[Dict[str, fields.FieldInfo]] + def __init__(cls, name: str, bases: tuple, clsdict: dict, **kwargs) -> None: + """ + Construct new patito model. 
-class Model(BaseModel, metaclass=ModelMetaclass): - """Custom pydantic class for representing table schema and constructing rows.""" + Args: + name: Name of model class. + bases: Tuple of superclasses. + clsdict: Dictionary containing class properties. + """ + super().__init__(name, bases, clsdict, **kwargs) + # Add a custom subclass of patito.DataFrame to the model class, + # where .set_model() has been implicitly set. + cls.DataFrame = DataFrame._construct_dataframe_model_class( + model=cls, # type: ignore + ) + # Similarly for LazyFrame + cls.LazyFrame = LazyFrame._construct_lazyframe_model_class( + model=cls, # type: ignore + ) + for column in cls.columns: # pyright: ignore TODO why is this needed? + col_info = cls.column_infos[column] + field_info = cls.model_fields[column] + if col_info.dtype: + validate_polars_dtype( + annotation=field_info.annotation, dtype=col_info.dtype + ) + else: + validate_annotation(field_info.annotation) - if TYPE_CHECKING: - model_fields: ClassVar[Dict[str, fields.FieldInfo]] + @property # TODO try cache + def column_infos(cls) -> Dict[str, ColumnInfo]: + """helper method for extracting patito-specific ColumnInfo objects from `model_fields` - model_config = ConfigDict( - ignored_types=(classproperty,), - ) + Returns: + Dict[str, ColumnInfo]: dictionary mapping column names to patito-specific column metadata - @classproperty - def columns(cls: Type[ModelType]) -> List[str]: # type: ignore + """ + fields = cls.model_fields + + def get_column_info(field: fields.FieldInfo) -> ColumnInfo: + if field.json_schema_extra is None: + return ColumnInfo() + elif callable(field.json_schema_extra): + raise NotImplementedError( + "Callable json_schema_extra not supported by patito." 
+ ) + return field.json_schema_extra["column_info"] # pyright: ignore # TODO JsonDict fix + + return {k: get_column_info(v) for k, v in fields.items()} + + # @property + # def model_fields(cls) -> Dict[str, fields.FieldInfo]: + # return cls.model_fields + + @property + def columns(cls: Type[ModelType]) -> List[str]: """ Return the name of the dataframe columns specified by the fields of the model. @@ -147,10 +166,10 @@ def columns(cls: Type[ModelType]) -> List[str]: # type: ignore """ return list(cls.model_json_schema()["properties"].keys()) - @classproperty + @property def dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore - ) -> dict[str, PolarsDataType]: + ) -> dict[str, DataTypeClass | DataType]: """ Return the polars dtypes of the dataframe. @@ -170,14 +189,27 @@ def dtypes( # type: ignore >>> Product.dtypes {'name': Utf8, 'ideal_temperature': Int64, 'price': Float64} """ - return { - column: valid_dtypes[0] for column, valid_dtypes in cls.valid_dtypes.items() - } + default_dtypes = {} + for column in cls.columns: + dtype = cls.column_infos[column].dtype + if dtype is None: + default_dtype = default_polars_dtype_for_annotation( + cls.model_fields[column].annotation + ) + if default_dtype is None: + raise ValueError( + f"Unable to find a default dtype for column `{column}`" + ) + else: + default_dtypes[column] = default_dtype + else: + default_dtypes[column] = dtype + return default_dtypes - @classproperty + @property def valid_dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore - ) -> dict[str, List[Union[PolarsDataType, pl.List]]]: + ) -> dict[str, FrozenSet[DataTypeClass | DataType]]: """ Return a list of polars dtypes which Patito considers valid for each field. 
@@ -206,127 +238,17 @@ def valid_dtypes( # type: ignore 'int_column': [Int64, Int32, Int16, Int8, UInt64, UInt32, UInt16, UInt8], 'str_column': [Utf8]} """ - valid_dtypes = {} - for column, props in cls._schema_properties().items(): - column_dtypes: List[Union[PolarsDataType, pl.List]] - column_dtypes = cls._valid_dtypes(column, props=props) # pyright: ignore - - if column_dtypes is None: - raise NotImplementedError( - f"No valid dtype mapping found for column '{column}'." - ) - valid_dtypes[column] = column_dtypes - - return valid_dtypes - - @classmethod - def _valid_dtypes( # noqa: C901 - cls: Type[ModelType], # pyright: ignore - column: str, - props: Dict, - ) -> Optional[List[PolarsDataType]]: - """ - Map schema property to list of valid polars data types. - - Args: - props: Dictionary value retrieved from BaseModel._schema_properties(). - - Returns: - List of valid dtypes. None if no mapping exists. - """ - column_info = cls.column_infos[column] - if ( - column_info.dtype is not None and "column_info" in props - ): # hack to make sure we only use dtype if we're in the outer scope - - def dtype_invalid(props: Dict) -> Tuple[bool, List[PolarsDataType]]: - if "type" in props: - valid_pl_types = cls._pydantic_type_to_valid_polars_types( - column, props - ) - if column_info.dtype not in valid_pl_types: - return True, valid_pl_types or [] - elif "anyOf" in props: - for sub_props in props["anyOf"]: - if sub_props["type"] == "null": - continue - else: - valid_pl_types = cls._pydantic_type_to_valid_polars_types( - column, sub_props - ) - if column_info.dtype not in valid_pl_types: - return True, valid_pl_types or [] - return False, [] - - invalid, valid_pl_types = dtype_invalid(props) - if invalid: - raise ValueError( - f"Invalid dtype {column_info.dtype} for column '{column}'. Allowable polars dtypes for {display_as_type(cls.model_fields[column].annotation)} are: {', '.join([str(x) for x in valid_pl_types])}." 
- ) - return [ - column_info.dtype, - ] - elif "enum" in props and props["type"] == "string": - return [pl.Categorical, pl.Utf8] - elif "type" not in props: - if "anyOf" in props: - res = [ - cls._valid_dtypes(column, sub_props) for sub_props in props["anyOf"] - ] - res = [x for x in res if x is not None] - return list(itertools.chain.from_iterable(res)) - elif "const" in props: - return cls._valid_dtypes( - column, {"type": PYTHON_TO_PYDANTIC_TYPES.get(type(props["const"]))} - ) - return None - - return cls._pydantic_type_to_valid_polars_types(column, props) - @classmethod - def _pydantic_type_to_valid_polars_types( - cls, - column: str, - props: Dict, - ) -> Optional[List[PolarsDataType]]: - if props["type"] == "array": - array_props = props["items"] - item_dtypes = ( - cls._valid_dtypes(column, array_props) if array_props else None + return { + column: valid_polars_dtypes_for_annotation( + cls.model_fields[column].annotation ) - if item_dtypes is None: - raise NotImplementedError( - f"No valid dtype mapping found for column '{column}'." 
- ) - return [pl.List(dtype) for dtype in item_dtypes] - elif props["type"] == "integer": - return PL_INTEGER_DTYPES - elif props["type"] == "number": - if props.get("format") == "time-delta": - return [pl.Duration] - else: - return [pl.Float64, pl.Float32] - elif props["type"] == "boolean": - return [pl.Boolean] - elif props["type"] == "string": - string_format = props.get("format") - if string_format is None: - return [pl.Utf8] - elif string_format == "date": - return [pl.Date] - # TODO: Find out why this branch is not being hit - elif string_format == "date-time": # pragma: no cover - return [pl.Datetime] - elif string_format == "duration": - return [pl.Duration] - elif string_format.startswith("uuid"): - return [pl.Object] - else: - return None # pragma: no cover - elif props["type"] == "null": - return [pl.Null] + if cls.column_infos[column].dtype is None + else frozenset({cls.column_infos[column].dtype}) + for column in cls.columns + } # pyright: ignore - @classproperty + @property # TODO deprecate? 
def valid_sql_types( # type: ignore # noqa: C901 cls: Type[ModelType], # pyright: ignore ) -> dict[str, List["DuckDBSQLType"]]: @@ -461,7 +383,7 @@ def valid_sql_types( # type: ignore # noqa: C901 return valid_dtypes - @classproperty + @property def sql_types( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> dict[str, str]: @@ -492,7 +414,7 @@ def sql_types( # type: ignore for column, valid_types in cls.valid_sql_types.items() } - @classproperty + @property def defaults( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> dict[str, Any]: @@ -514,12 +436,11 @@ def defaults( # type: ignore {'price': 0, 'temperature_zone': 'dry'} """ return { - field_name: props["default"] - for field_name, props in cls._schema_properties().items() - if "default" in props + field_name: cls.model_fields[field_name].default + for field_name in cls.columns } - @classproperty + @property def non_nullable_columns( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> set[str]: @@ -547,7 +468,7 @@ def non_nullable_columns( # type: ignore if type(None) not in get_args(cls.model_fields[k].annotation) ) - @classproperty + @property def nullable_columns( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> set[str]: @@ -571,7 +492,7 @@ def nullable_columns( # type: ignore """ return set(cls.columns) - cls.non_nullable_columns - @classproperty + @property def unique_columns( # type: ignore cls: Type[ModelType], # pyright: ignore ) -> set[str]: @@ -596,23 +517,9 @@ def unique_columns( # type: ignore infos = cls.column_infos return {column for column in cls.columns if infos[column].unique} - @classproperty - def DataFrame( - cls: Type[ModelType], # type: ignore[misc] - ) -> Type[DataFrame[ModelType]]: # pyright: ignore # noqa - """Return DataFrame class where DataFrame.set_model() is set to self.""" - return DataFrame._construct_dataframe_model_class( - model=cls, # type: ignore - ) - @classproperty - def LazyFrame( - cls: Type[ModelType], # type: ignore[misc] - ) 
-> Type[LazyFrame[ModelType]]: # pyright: ignore - """Return DataFrame class where DataFrame.set_model() is set to self.""" - return LazyFrame._construct_lazyframe_model_class( - model=cls, # type: ignore - ) +class Model(BaseModel, metaclass=ModelMetaclass): + """Custom pydantic class for representing table schema and constructing rows.""" @classmethod def from_row( @@ -1090,7 +997,7 @@ def examples( if column_name not in kwargs: if column_name in cls.unique_columns: unique_series.append( - pl.first().cumcount().cast(dtype).alias(column_name) + pl.first().cum_count().cast(dtype).alias(column_name) ) else: example_value = cls.example_value(field=column_name) @@ -1389,30 +1296,7 @@ def with_fields( field_mapping=fields, ) - @classmethod - def model_json_schema(cls, *args, **kwargs) -> Dict[str, Any]: - schema = super().model_json_schema(*args, **kwargs) - # for k, v in schema["properties"].items(): - # if 'column_info' in v: - # schema["properties"][k] = {**schema['properties'][k].pop("column_info"), **schema["properties"][k]} - return schema - - @classproperty - def column_infos(cls) -> Dict[str, ColumnInfo]: - fields = cls.model_fields - - def get_column_info(field: fields.FieldInfo) -> ColumnInfo: - if field.json_schema_extra is None: - return ColumnInfo() - elif callable(field.json_schema_extra): - raise NotImplementedError( - "Callable json_schema_extra not supported by patito." - ) - return field.json_schema_extra["column_info"] - - return {k: get_column_info(v) for k, v in fields.items()} - - @classmethod + @classmethod # TODO reduce references to this in favor of ColumnInfo/FieldInfo def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: """ Return schema properties where definition references have been resolved. 
@@ -1452,11 +1336,6 @@ def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: fields[field_name]["type"] = PYTHON_TO_PYDANTIC_TYPES[ type(field_info["const"]) ] - # for f in cls.model_fields[field_name]._attributes_set: - # if f not in fields[field_name]: - # v = getattr(cls.model_fields[field_name], f, None) - # if v is not None: - # fields[field_name][f] = v return fields @@ -1506,7 +1385,7 @@ def _derive_model( def _derive_field( field: fields.FieldInfo, make_nullable: bool = False, - ) -> Tuple[Type, fields.FieldInfo]: + ) -> Tuple[Type | None, fields.FieldInfo]: field_type = field.annotation default = field.default extra_attrs = { @@ -1532,31 +1411,26 @@ def _derive_field( return field_type, field_new -def parse_composite_dtype(dtype: DataTypeClass | DataType) -> str: - if dtype in pl.NESTED_DTYPES: # TODO deprecated, move onto lookup - return f"{convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype.base_type()]}[{parse_composite_dtype(dtype.inner)}]" - elif dtype in pl.DATETIME_DTYPES: - return dtype_str_repr(dtype) - else: - return convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype] - - -def dtype_from_string(v: str): - """for deserialization""" - # TODO test all dtypes - return convert.dtype_short_repr_to_dtype(v) +class ColumnInfo(BaseModel, arbitrary_types_allowed=True): + """patito-side model for storing column metadata + Args: + constraints (Union[polars.Expression, List[polars.Expression]): A single + constraint or list of constraints, expressed as a polars expression objects. + All rows must satisfy the given constraint. You can refer to the given column + with ``pt.field``, which will automatically be replaced with + ``polars.col()`` before evaluation. + derived_from (Union[str, polars.Expr]): used to mark fields that are meant to be derived from other fields. Users can specify a polars expression that will be called to derive the column value when `pt.DataFrame.derive` is called. 
+ dtype (polars.datatype.DataType): The given dataframe column must have the given + polars dtype, for instance ``polars.UInt64`` or ``pl.Float32``. + unique (bool): All row values must be unique. + """ -class ColumnInfo(BaseModel, arbitrary_types_allowed=True): - dtype: DataTypeClass | DataType | None = None # TODO polars migrating onto using instances? https://github.com/pola-rs/polars/issues/6163 + dtype: DataTypeClass | DataType | None = None constraints: pl.Expr | Sequence[pl.Expr] | None = None derived_from: str | pl.Expr | None = None unique: bool | None = None - # @model_serializer - # def serialize(self) -> Dict[str, Any]: - # return {k: v for k, v in self.__dict__.items() if v is not None} - @field_serializer("constraints", "derived_from") def serialize_exprs(self, exprs: str | pl.Expr | Sequence[pl.Expr] | None) -> Any: if exprs is None: @@ -1604,17 +1478,6 @@ def Field( unique: bool | None = None, **kwargs, ) -> Any: - column_info = ColumnInfo( - dtype=dtype, constraints=constraints, derived_from=derived_from, unique=unique - ) - return fields.Field( - *args, - json_schema_extra={"column_info": column_info}, - **kwargs, - ) - - -class FieldDoc: """ Annotate model field with additional type and validation information. @@ -1629,10 +1492,19 @@ class FieldDoc: All rows must satisfy the given constraint. You can refer to the given column with ``pt.field``, which will automatically be replaced with ``polars.col()`` before evaluation. - derived_from (Union[str, polars.Expr]): used to mark fields that are meant to be derived from other fields. Users can specify a polars expression that will be called to derive the column value when `pt.DataFrame.derive` is called. + unique (bool): All row values must be unique. dtype (polars.datatype.DataType): The given dataframe column must have the given polars dtype, for instance ``polars.UInt64`` or ``pl.Float32``. - unique (bool): All row values must be unique. + gt: All values must be greater than ``gt``. 
+ ge: All values must be greater than or equal to ``ge``. + lt: All values must be less than ``lt``. + le: All values must be less than or equal to ``le``. + multiple_of: All values must be multiples of the given value. + const (bool): If set to ``True`` `all` values must be equal to the provided + default value, the first argument provided to the ``Field`` constructor. + regex (str): UTF-8 string column must match regex pattern for all row values. + min_length (int): Minimum length of all string values in a UTF-8 column. + max_length (int): Maximum length of all string values in a UTF-8 column. Return: FieldInfo: Object used to represent additional constraints put upon the given @@ -1674,6 +1546,14 @@ class FieldDoc: brand_color 2 rows with out of bound values. (type=value_error.rowvalue) """ - - -Field.__doc__ = FieldDoc.__doc__ + column_info = ColumnInfo( + dtype=dtype, constraints=constraints, derived_from=derived_from, unique=unique + ) + field_info = fields.Field( + *args, + json_schema_extra={ + "column_info": column_info # pyright: ignore TODO pydantic expects JsonDict here, how to signal this is serializable? + }, + **kwargs, + ) + return field_info diff --git a/src/patito/validators.py b/src/patito/validators.py index 41e5be2..498f0f2 100644 --- a/src/patito/validators.py +++ b/src/patito/validators.py @@ -9,12 +9,12 @@ from patito.exceptions import ( ColumnDTypeError, + DataFrameValidationError, ErrorWrapper, MissingColumnsError, MissingValuesError, RowValueError, SuperflousColumnsError, - DataFrameValidationError, ) if sys.version_info >= (3, 10): # pragma: no cover @@ -148,9 +148,9 @@ def _find_errors( # noqa: C901 for column, dtype in schema.dtypes.items(): if not isinstance(dtype, pl.List): - continue + continue # TODO add validation here - annotation = schema.__annotations__[column] # type: ignore[unreachable] + annotation = schema.model_fields[column].annotation # Retrieve the annotation of the list itself, # dewrapping any potential Optional[...]
@@ -194,7 +194,9 @@ def _find_errors( # noqa: C901 continue polars_type = dataframe_datatypes[column_name] - if polars_type not in valid_dtypes[column_name]: + if ( + polars_type not in valid_dtypes[column_name] + ): # TODO allow for `strict` validation errors.append( ErrorWrapper( ColumnDTypeError( diff --git a/tests/test_database.py b/tests/test_database.py index bd06679..2ac320c 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -5,11 +5,10 @@ from tempfile import TemporaryDirectory from typing import TYPE_CHECKING, List, Optional +import patito as pt import polars as pl import pytest -import patito as pt - if TYPE_CHECKING: import pyarrow as pa # type: ignore else: diff --git a/tests/test_dtypes.py b/tests/test_dtypes.py new file mode 100644 index 0000000..52a8260 --- /dev/null +++ b/tests/test_dtypes.py @@ -0,0 +1,155 @@ +from datetime import date, datetime, time, timedelta +from typing import Dict, List, Literal, Sequence, Tuple + +import polars as pl +import pytest +from patito._pydantic.dtypes import ( + BOOLEAN_DTYPES, + DATE_DTYPES, + DATETIME_DTYPES, + DURATION_DTYPES, + FLOAT_DTYPES, + INTEGER_DTYPES, + PT_BASE_SUPPORTED_DTYPES, + STRING_DTYPES, + TIME_DTYPES, + DataTypeGroup, + default_polars_dtype_for_annotation, + valid_polars_dtypes_for_annotation, + validate_annotation, + validate_polars_dtype, +) + + +def test_valids_basic_annotations(): + # base types + assert valid_polars_dtypes_for_annotation(str) == STRING_DTYPES + assert valid_polars_dtypes_for_annotation(int) == DataTypeGroup( + INTEGER_DTYPES | FLOAT_DTYPES + ) + assert valid_polars_dtypes_for_annotation(float) == FLOAT_DTYPES + assert valid_polars_dtypes_for_annotation(bool) == BOOLEAN_DTYPES + + # temporals + assert valid_polars_dtypes_for_annotation(datetime) == DATETIME_DTYPES + assert valid_polars_dtypes_for_annotation(date) == DATE_DTYPES + assert valid_polars_dtypes_for_annotation(time) == TIME_DTYPES + assert valid_polars_dtypes_for_annotation(timedelta) == 
DURATION_DTYPES + + # other + with pytest.raises(TypeError, match="must be strings"): + valid_polars_dtypes_for_annotation(Literal[1, 2, 3]) # pyright: ignore + with pytest.raises(TypeError, match="Mixed type enums not supported"): + valid_polars_dtypes_for_annotation(Literal[1, 2, "3"]) # pyright: ignore + + assert valid_polars_dtypes_for_annotation(Literal["a", "b", "c"]) == { # pyright: ignore + pl.Categorical, + pl.Utf8, + } + + # combos + assert valid_polars_dtypes_for_annotation(str | None) == STRING_DTYPES + assert valid_polars_dtypes_for_annotation(int | float) == FLOAT_DTYPES + assert ( + valid_polars_dtypes_for_annotation(str | int) == frozenset() + ) # incompatible, TODO raise patito error with strict validation on + + # invalids + assert valid_polars_dtypes_for_annotation(object) == frozenset() + + +def test_valids_nested_annotations(): + assert len(valid_polars_dtypes_for_annotation(List)) == 0 # needs inner annotation + assert ( + valid_polars_dtypes_for_annotation(Tuple) + == valid_polars_dtypes_for_annotation(List) + == valid_polars_dtypes_for_annotation(Sequence) + ) # for now, these are the same + + assert valid_polars_dtypes_for_annotation(List[str]) == {pl.List(pl.Utf8)} + assert valid_polars_dtypes_for_annotation(List[str] | None) == {pl.List(pl.Utf8)} + assert len(valid_polars_dtypes_for_annotation(List[int])) == len( + DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) + ) + assert len(valid_polars_dtypes_for_annotation(List[int | float])) == len( + FLOAT_DTYPES + ) + assert len(valid_polars_dtypes_for_annotation(List[int | None])) == len( + DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) + ) + assert valid_polars_dtypes_for_annotation(List[List[str]]) == { + pl.List(pl.List(pl.Utf8)) + } # recursion works as expected + + assert ( + valid_polars_dtypes_for_annotation(Dict) == frozenset() + ) # not currently supported + + +def test_dtype_validation(): + validate_polars_dtype(int, pl.Int16) # no issue + validate_polars_dtype(int, pl.Float64) # no issue 
+ with pytest.raises(ValueError, match="Invalid dtype"): + validate_polars_dtype(int, pl.Utf8) + + with pytest.raises(ValueError, match="Invalid dtype"): + validate_polars_dtype(List[str], pl.List(pl.Float64)) + + +def test_defaults_basic_annotations(): + # base types + assert default_polars_dtype_for_annotation(str) == pl.Utf8 + assert default_polars_dtype_for_annotation(int) == pl.Int64 + assert default_polars_dtype_for_annotation(float) == pl.Float64 + assert default_polars_dtype_for_annotation(bool) == pl.Boolean + + # temporals + assert default_polars_dtype_for_annotation(datetime) == pl.Datetime + assert default_polars_dtype_for_annotation(date) == pl.Date + assert default_polars_dtype_for_annotation(time) == pl.Time + assert default_polars_dtype_for_annotation(timedelta) == pl.Duration + + # combos + assert default_polars_dtype_for_annotation(str | None) == pl.Utf8 + assert default_polars_dtype_for_annotation(int | float) == None + assert default_polars_dtype_for_annotation(str | int) == None + + # invalids + assert default_polars_dtype_for_annotation(object) == None + + +def test_defaults_nested_annotations(): + assert default_polars_dtype_for_annotation(List) == None # needs inner annotation + + assert default_polars_dtype_for_annotation(List[str]) == pl.List(pl.Utf8) + assert default_polars_dtype_for_annotation(List[str] | None) == pl.List(pl.Utf8) + assert default_polars_dtype_for_annotation(List[int]) == pl.List(pl.Int64) + assert default_polars_dtype_for_annotation(List[int | None]) == pl.List(pl.Int64) + assert default_polars_dtype_for_annotation(List[int | float]) == None + assert default_polars_dtype_for_annotation(List[str | int]) == None + assert default_polars_dtype_for_annotation(List[List[str]]) == pl.List( + pl.List(pl.Utf8) + ) # recursion works as expected + assert default_polars_dtype_for_annotation(List[List[str | None]]) == pl.List( + pl.List(pl.Utf8) + ) + + with pytest.raises( + ValueError, match="pydantic object types not currently 
supported" + ): + default_polars_dtype_for_annotation(Dict) + + +def test_annotation_validation(): + validate_annotation(int) # no issue + validate_annotation(int | None) + with pytest.raises(ValueError, match="Valid dtypes are:"): + validate_annotation(int | float) + with pytest.raises(ValueError, match="not compatible with any polars dtypes"): + validate_annotation(str | int) + + validate_annotation(List[int | None]) + with pytest.raises(ValueError, match="not compatible with any polars dtypes"): + validate_annotation(List[str | int]) + with pytest.raises(ValueError, match="Valid dtypes are:"): + validate_annotation(List[int | float]) diff --git a/tests/test_duckdb/test_database.py b/tests/test_duckdb/test_database.py index dd4962a..88e670b 100644 --- a/tests/test_duckdb/test_database.py +++ b/tests/test_duckdb/test_database.py @@ -2,12 +2,11 @@ import enum from typing import Optional +import patito as pt import polars as pl import pytest from typing_extensions import Literal -import patito as pt - # Skip test module if DuckDB is not installed if not pt._DUCKDB_AVAILABLE: pytest.skip("DuckDB not installed", allow_module_level=True) diff --git a/tests/test_duckdb/test_relation.py b/tests/test_duckdb/test_relation.py index 5191ed9..60d8acb 100644 --- a/tests/test_duckdb/test_relation.py +++ b/tests/test_duckdb/test_relation.py @@ -4,12 +4,11 @@ from typing import Optional from unittest.mock import MagicMock +import patito as pt import polars as pl import pytest from typing_extensions import Literal -import patito as pt - # Skip test module if DuckDB is not installed if not pt._DUCKDB_AVAILABLE: pytest.skip("DuckDB not installed", allow_module_level=True) diff --git a/tests/test_dummy_data.py b/tests/test_dummy_data.py index e3e6db2..3071edb 100644 --- a/tests/test_dummy_data.py +++ b/tests/test_dummy_data.py @@ -1,12 +1,11 @@ """Test of functionality related to the generation of dummy data.""" from datetime import date, datetime -from typing import Optional, 
Literal, List +from typing import List, Literal, Optional +import patito as pt import polars as pl import pytest -import patito as pt - def test_model_example_df(): """Test for patito.Model.example().""" @@ -35,8 +34,8 @@ class MyRow(pt.Model): } ) - assert df_1[correct_df.columns].frame_equal(correct_df) - assert df_2[correct_df.columns].frame_equal(correct_df) + assert df_1[correct_df.columns].equals(correct_df) + assert df_2[correct_df.columns].equals(correct_df) # A TypeError should be raised when you provide wrong keywords with pytest.raises( @@ -127,7 +126,7 @@ class DefaultEnumModel(pt.Model): # Workaround for pl.StringCache() not working here for some reason assert correct_example_df.dtypes == example_df.dtypes - assert example_df.select(pl.all().cast(pl.Utf8)).frame_equal( + assert example_df.select(pl.all().cast(pl.Utf8)).equals( correct_example_df.select(pl.all().cast(pl.Utf8)) ) diff --git a/tests/test_model.py b/tests/test_model.py index 644b61a..3c609bd 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -3,15 +3,21 @@ import enum import re from datetime import date, datetime, timedelta -from typing import List, Optional, Type, Literal +from typing import List, Literal, Optional, Type +import patito as pt import polars as pl import pytest +from patito._pydantic.dtypes import ( + DATE_DTYPES, + DATETIME_DTYPES, + DURATION_DTYPES, + FLOAT_DTYPES, + INTEGER_DTYPES, + DataTypeGroup, +) from pydantic import ValidationError -import patito as pt -from patito.pydantic import PL_INTEGER_DTYPES - def test_model_example(): """Test for Model.example().""" @@ -199,74 +205,15 @@ class CompleteModel(pt.Model): } assert CompleteModel.valid_dtypes == { - "str_column": [pl.Utf8], - "int_column": [ - pl.Int64, - pl.Int32, - pl.Int16, - pl.Int8, - pl.UInt64, - pl.UInt32, - pl.UInt16, - pl.UInt8, - ], - "float_column": [pl.Float64, pl.Float32], - "bool_column": [pl.Boolean], - "date_column": [pl.Date], - "datetime_column": [pl.Datetime], - "duration_column": 
[pl.Duration], - "categorical_column": [pl.Categorical, pl.Utf8], - "null_column": [pl.Null], - } - - -def test_mapping_to_polars_dtypes_with_lists(): - """Model list fields should be mappable to polars dtypes.""" - - class CompleteModel(pt.Model): - str_column: List[str] - int_column: List[int] - float_column: List[float] - bool_column: List[bool] - - date_column: List[date] - datetime_column: List[datetime] - duration_column: List[timedelta] - - categorical_column: List[Literal["a", "b", "c"]] - null_column: List[None] - - assert CompleteModel.dtypes == { - "str_column": pl.List(pl.Utf8), - "int_column": pl.List(pl.Int64), - "float_column": pl.List(pl.Float64), - "bool_column": pl.List(pl.Boolean), - "date_column": pl.List(pl.Date), - "datetime_column": pl.List(pl.Datetime), - "duration_column": pl.List(pl.Duration), - "categorical_column": pl.List(pl.Categorical), - "null_column": pl.List(pl.Null), - } - - assert CompleteModel.valid_dtypes == { - "str_column": [pl.List(pl.Utf8)], - "int_column": [ - pl.List(pl.Int64), - pl.List(pl.Int32), - pl.List(pl.Int16), - pl.List(pl.Int8), - pl.List(pl.UInt64), - pl.List(pl.UInt32), - pl.List(pl.UInt16), - pl.List(pl.UInt8), - ], - "float_column": [pl.List(pl.Float64), pl.List(pl.Float32)], - "bool_column": [pl.List(pl.Boolean)], - "date_column": [pl.List(pl.Date)], - "datetime_column": [pl.List(pl.Datetime)], - "duration_column": [pl.List(pl.Duration)], - "categorical_column": [pl.List(pl.Categorical), pl.List(pl.Utf8)], - "null_column": [pl.List(pl.Null)], + "str_column": {pl.Utf8}, + "int_column": DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES), + "float_column": FLOAT_DTYPES, + "bool_column": {pl.Boolean}, + "date_column": DATE_DTYPES, + "datetime_column": DATETIME_DTYPES, + "duration_column": DURATION_DTYPES, + "categorical_column": {pl.Categorical, pl.Utf8}, + "null_column": {pl.Null}, } @@ -418,71 +365,40 @@ class MultiTypedEnum(enum.Enum): ONE = 1 TWO = "2" - class InvalidEnumModel(pt.Model): - column: MultiTypedEnum 
+ with pytest.raises(TypeError, match="Mixed type enums not supported"): + + class InvalidEnumModel(pt.Model): + column: MultiTypedEnum if pt._DUCKDB_AVAILABLE: # pragma: no cover assert EnumModel.sql_types["column"].startswith("enum__") with pytest.raises(TypeError, match=r".*Encountered types: \['int', 'str'\]\."): - InvalidEnumModel.sql_types - - -# TODO new tests for ColumnInfo -# def test_pt_fields(): -# class Model(pt.Model): -# a: int -# b: int = pt.Field(constraints=[(pl.col("b") < 10)]) -# c: int = pt.Field(derived_from=pl.col("a") + pl.col("b")) -# d: int = pt.Field(dtype=pl.UInt8) -# e: int = pt.Field(unique=True) - -# schema = Model.model_json_schema() # no serialization issues -# props = ( -# Model._schema_properties() -# ) # extra fields are stored in modified schema_properties -# assert "constraints" in props["b"] -# assert "derived_from" in props["c"] -# assert "dtype" in props["d"] -# assert "unique" in props["e"] - -# def check_repr(field, set_value: str) -> None: -# assert f"{set_value}=" in repr(field) -# assert all(x not in repr(field) for x in get_args(ColumnInfo.model_fields) if x != set_value) - -# fields = ( -# Model.model_fields -# ) # attributes are properly set and catalogued on the `FieldInfo` objects -# assert "constraints" in fields["b"]._attributes_set -# assert fields["b"].constraints is not None -# check_repr(fields["b"], "constraints") -# assert "derived_from" in fields["c"]._attributes_set -# assert fields["c"].derived_from is not None -# check_repr(fields["c"], "derived_from") -# assert "dtype" in fields["d"]._attributes_set -# assert fields["d"].dtype is not None -# check_repr(fields["d"], "dtype") -# assert "unique" in fields["e"]._attributes_set -# assert fields["e"].unique is not None -# check_repr(fields["e"], "unique") - - -# def test_custom_field_info(): -# class FieldExt(BaseModel): -# foo: str | None = _Unset - -# Field = field(exts=[FieldExt]) - -# class Model(pt.Model): -# bar: int = Field(foo="hello") - -# test_field 
= Model.model_fields["bar"] -# assert ( -# test_field.foo == "hello" -# ) # TODO passes but typing is unhappy here, can we make custom FieldInfo configurable? If users subclass `Model` then it is easy to reset the typing to point at their own `FieldInfo` implementation -# assert "foo=" in repr(test_field) -# assert "foo" in Model._schema_properties()["bar"] -# with pytest.raises(AttributeError): -# print(test_field.derived_from) # patito FieldInfo successfully overriden + InvalidEnumModel.sql_types # pyright: ignore + + +def test_column_infos(): + class Model(pt.Model): + a: int + b: int = pt.Field(constraints=[(pl.col("b") < 10)]) + c: int = pt.Field(derived_from=pl.col("a") + pl.col("b")) + d: int = pt.Field(dtype=pl.UInt8) + e: int = pt.Field(unique=True) + + schema = Model.model_json_schema() # no serialization issues + props = schema[ + "properties" + ] # extra fields are stored in modified schema_properties + for col in ["b", "c", "d", "e"]: + assert "column_info" in props[col] + assert props["b"]["column_info"]["constraints"] is not None + assert props["c"]["column_info"]["derived_from"] is not None + assert props["d"]["column_info"]["dtype"] is not None + assert props["e"]["column_info"]["unique"] is not None + infos = Model.column_infos + assert infos["b"].constraints is not None + assert infos["c"].derived_from is not None + assert infos["d"].dtype is not None + assert infos["e"].unique is not None def test_nullable_columns(): @@ -500,35 +416,20 @@ class Test2(pt.Model): def test_conflicting_type_dtype(): - class Test1(pt.Model): - foo: int = pt.Field(dtype=pl.Utf8) + with pytest.raises(ValueError, match="Invalid dtype Utf8") as e: - with pytest.raises(ValueError) as e: - Test1.valid_dtypes - assert ( - f"Invalid dtype Utf8 for column 'foo'. Allowable polars dtypes for int are: {', '.join(str(x) for x in PL_INTEGER_DTYPES)}." 
- == str(e.value) - ) + class Test1(pt.Model): + foo: int = pt.Field(dtype=pl.Utf8) - class Test2(pt.Model): - foo: str = pt.Field(dtype=pl.Float32) + with pytest.raises(ValueError, match="Invalid dtype Float32") as e: - with pytest.raises(ValueError) as e: - Test2.valid_dtypes - assert ( - "Invalid dtype Float32 for column 'foo'. Allowable polars dtypes for str are: Utf8." - == str(e.value) - ) + class Test2(pt.Model): + foo: str = pt.Field(dtype=pl.Float32) - class Test3(pt.Model): - foo: str | None = pt.Field(dtype=pl.UInt32) + with pytest.raises(ValueError, match="Invalid dtype UInt32") as e: - with pytest.raises(ValueError) as e: - Test3.valid_dtypes - assert ( - "Invalid dtype UInt32 for column 'foo'. Allowable polars dtypes for Union[str, NoneType] are: Utf8." - == str(e.value) - ) + class Test3(pt.Model): + foo: str | None = pt.Field(dtype=pl.UInt32) def test_polars_python_type_harmonization(): @@ -536,6 +437,4 @@ class Test(pt.Model): date: datetime = pt.Field(dtype=pl.Datetime(time_unit="us")) # TODO add more other lesser-used type combinations here - assert type(Test.valid_dtypes["date"][0]) == pl.Datetime - assert Test.valid_dtypes["date"][0].time_unit == "us" - assert Test.valid_dtypes["date"][0].time_zone is None + assert Test.valid_dtypes["date"] == {pl.Datetime(time_unit="us")} diff --git a/tests/test_polars.py b/tests/test_polars.py index 2eb094b..668aab9 100644 --- a/tests/test_polars.py +++ b/tests/test_polars.py @@ -2,12 +2,11 @@ import re from datetime import date, datetime +import patito as pt import polars as pl import pytest from pydantic import ValidationError -import patito as pt - def test_dataframe_get_method(): """You should be able to retrieve a single row and cast to model.""" @@ -67,7 +66,7 @@ class DefaultModel(pt.Model): missing_df = pt.DataFrame({"foo": [1, None], "bar": [None, "provided"]}) filled_df = missing_df.set_model(DefaultModel).fill_null(strategy="defaults") correct_filled_df = pt.DataFrame({"foo": [1, 2], "bar": 
["default", "provided"]}) - assert filled_df.frame_equal(correct_filled_df) + assert filled_df.equals(correct_filled_df) def test_preservation_of_model(): @@ -111,21 +110,15 @@ def test_dataframe_model_dtype_casting(): """You should be able to cast columns according to model type annotations.""" class DTypeModel(pt.Model): - implicit_int_1: int implicit_int_2: int - explicit_uint: int = pt.Field(dtype=pl.UInt64) implicit_date: date implicit_datetime: datetime original_df = DTypeModel.DataFrame().with_columns( [ - # This float will be casted to an integer, and since no specific integer - # dtype is specified, the default pl.Int64 will be used. - pl.lit(1.0).cast(pl.Float64).alias("implicit_int_1"), # UInt32 is compatible with the "int" annotation, and since no explicit # dtype is specified, it will not be casted to the default pl.Int64 pl.lit(1).cast(pl.UInt32).alias("implicit_int_2"), - pl.lit(1.0).cast(pl.Float64).alias("explicit_uint"), # The integer will be casted to datetime 1970-01-01 00:00:00 pl.lit(0).cast(pl.Int64).alias("implicit_date"), # The integer will be casted to date 1970-01-01 @@ -136,9 +129,7 @@ class DTypeModel(pt.Model): ) casted_df = original_df.cast() assert casted_df.dtypes == [ - pl.Int64, pl.UInt32, - pl.UInt64, pl.Date, pl.Datetime, pl.Boolean, @@ -147,8 +138,6 @@ class DTypeModel(pt.Model): strictly_casted_df = original_df.cast(strict=True) assert strictly_casted_df.dtypes == [ pl.Int64, - pl.Int64, - pl.UInt64, pl.Date, pl.Datetime, pl.Boolean, @@ -194,7 +183,7 @@ class DerivedModel(pt.Model): csv_path.write_text("month,dollars\n1,2.99") derived_df = DerivedModel.DataFrame.read_csv(csv_path) - assert derived_df.frame_equal( + assert derived_df.equals( DerivedModel.DataFrame({"month": [1], "dollars": [2.99], "cents": [299]}) ) @@ -221,13 +210,13 @@ class DerivedModel(pt.Model): "second_order_derived": [4, 8], } ) - assert derived_df.frame_equal(correct_derived_df) + assert derived_df.equals(correct_derived_df) # Non-compatible derive_from 
arguments should raise TypeError with pytest.raises(ValidationError): class InvalidModel(pt.Model): - incompatible: int = pt.Field(derived_from=object) + incompatible: int = pt.Field(derived_from=object) # pyright: ignore def test_drop_method(): diff --git a/tests/test_validators.py b/tests/test_validators.py index 127cb01..7d6af23 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -1,14 +1,13 @@ """Tests for the patito.validators module.""" import enum +import re import sys from datetime import date, datetime -from typing import List, Optional, Union, Literal -import re +from typing import List, Literal, Optional, Union +import patito as pt import polars as pl import pytest - -import patito as pt from patito import DataFrameValidationError from patito.validators import _dewrap_optional, _is_optional, validate @@ -143,7 +142,7 @@ class IntModel(pt.Model): validate(dataframe=dataframe, schema=IntModel) # But other types, including floating point types, must be considered invalid - for dtype in (pl.Utf8, pl.Date, pl.Float32, pl.Float64): + for dtype in (pl.Utf8, pl.Date): series = pl.Series([], dtype=dtype).alias("column") dataframe = pl.DataFrame([series]) with pytest.raises(DataFrameValidationError) as e_info: @@ -182,7 +181,7 @@ class CompleteModel(pt.Model): # We try to hit each column dtype check for column in CompleteModel.columns: if column == "int_column": - dtype = pl.Float64 + dtype = pl.Utf8 else: dtype = pl.Int64 @@ -202,24 +201,16 @@ class CompleteModel(pt.Model): } # Anything non-compatible with polars should raise NotImplementedError - class NonCompatibleModel(pt.Model): - my_field: object + with pytest.raises(ValueError, match="not compatible with any polars dtypes"): - with pytest.raises( - NotImplementedError, - match="No valid dtype mapping found for column 'my_field'.", - ): - NonCompatibleModel.valid_dtypes + class NonCompatibleModel(pt.Model): # TODO catch value error + my_field: object # The same goes for list-annotated 
fields - class NonCompatibleListModel(pt.Model): - my_field: List[object] + with pytest.raises(ValueError, match="not compatible with any polars dtypes"): - with pytest.raises( - NotImplementedError, - match="No valid dtype mapping found for column 'my_field'.", - ): - NonCompatibleListModel.valid_dtypes + class NonCompatibleListModel(pt.Model): + my_field: List[object] # It should also work with pandas data frames class PandasCompatibleModel(CompleteModel): @@ -441,7 +432,7 @@ class DTypeModel(pt.Model): DTypeModel.validate(valid_df) invalid = [ - pl.Series([2.5]).cast(pl.Float64), + pl.Series(["a"]).cast(pl.Utf8), pl.Series([2.5]).cast(pl.Float64), pl.Series([2**32]).cast(pl.Int64), pl.Series([-2]).cast(pl.Int64), @@ -450,7 +441,7 @@ class DTypeModel(pt.Model): for column_index, (column_name, dtype) in enumerate( zip( DTypeModel.columns, - [pl.Float64, pl.Float64, pl.Int64, pl.Int64, pl.Int64], + [pl.Utf8, pl.Float64, pl.Int64, pl.Int64, pl.Int64], ) ): data = ( From 054d03452adf348e8c535d8725c5ea02126fb44c Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Wed, 7 Feb 2024 11:57:07 -0500 Subject: [PATCH 21/29] chore: remove database/duckdb support --- pyproject.toml | 7 +- src/patito/_pydantic/dtypes.py | 10 +- src/patito/database.py | 658 ------- src/patito/duckdb.py | 2793 ---------------------------- src/patito/pydantic.py | 7 +- tests/test_database.py | 568 ------ tests/test_dtypes.py | 1 - tests/test_duckdb/__init__.py | 0 tests/test_duckdb/test_database.py | 276 --- tests/test_duckdb/test_relation.py | 1063 ----------- tests/test_model.py | 2 +- 11 files changed, 10 insertions(+), 5375 deletions(-) delete mode 100644 src/patito/database.py delete mode 100644 src/patito/duckdb.py delete mode 100644 tests/test_database.py delete mode 100644 tests/test_duckdb/__init__.py delete mode 100644 tests/test_duckdb/test_database.py delete mode 100644 tests/test_duckdb/test_relation.py diff --git a/pyproject.toml b/pyproject.toml index 3716242..4465839 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -136,4 +136,9 @@ module = ["tests.test_validators"] warn_unused_ignores = false [tool.ruff.lint] -select = ["I"] \ No newline at end of file +select = ["E4", "E7", "E9", "F", "I"] +ignore = [] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] \ No newline at end of file diff --git a/src/patito/_pydantic/dtypes.py b/src/patito/_pydantic/dtypes.py index d806796..0a19d7d 100644 --- a/src/patito/_pydantic/dtypes.py +++ b/src/patito/_pydantic/dtypes.py @@ -1,16 +1,11 @@ -import itertools from enum import Enum from typing import ( Any, Dict, FrozenSet, List, - Literal, Optional, Sequence, - Tuple, - cast, - get_args, ) import polars as pl @@ -70,8 +65,7 @@ class PydanticStringFormat(Enum): def parse_composite_dtype(dtype: DataTypeClass | DataType) -> str: - """for serialization, converts polars dtype to string representation - """ + """for serialization, converts polars dtype to string representation""" if dtype in pl.NESTED_DTYPES: if dtype == pl.Struct or isinstance(dtype, pl.Struct): raise NotImplementedError("Structs not yet supported by patito") @@ -156,7 +150,7 @@ def valid_polars_dtypes_for_annotation( Args: annotation (type[Any] | None): python type annotation - + Returns: FrozenSet[DataTypeClass | DataType]: set of polars dtypes """ diff --git a/src/patito/database.py b/src/patito/database.py deleted file mode 100644 index 3477d79..0000000 --- a/src/patito/database.py +++ /dev/null @@ -1,658 +0,0 @@ -"""Module containing utilities for retrieving data from external databases.""" -import glob -import hashlib -import inspect -import re -from datetime import datetime, timedelta -from functools import wraps -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generic, - Optional, - Type, - TypeVar, - Union, - cast, - overload, -) - -import polars as pl -import pyarrow as pa # type: ignore[import] -import pyarrow.parquet as pq # type: 
ignore[import] -from typing_extensions import Literal, ParamSpec, Protocol - -from patito import xdg - -if TYPE_CHECKING: - from patito import Model - - -P = ParamSpec("P") -DF = TypeVar("DF", bound=Union[pl.DataFrame, pl.LazyFrame], covariant=True) - -# Increment this integer whenever you make backwards-incompatible changes to -# the parquet caching implemented in WrappedQueryFunc, then such caches -# are ejected the next time the wrapper tries to read from them. -CACHE_VERSION = 1 - - -class QueryConstructor(Protocol[P]): - """A function taking arbitrary arguments and returning an SQL query string.""" - - __name__: str - - def __call__(self, *args: P.args, **kwargs: P.kwargs) -> str: - """ - Return SQL query constructed from the given parameters. - - Args: - *args: Positional arguments used to build SQL query. - **kwargs: Keyword arguments used to build SQL query. - """ - ... # pragma: no cover - - -class DatabaseQuery(Generic[P, DF]): - """A class acting as a function that returns a polars.DataFrame when called.""" - - _cache: Union[bool, Path] - - def __init__( # noqa: C901 - self, - query_constructor: QueryConstructor[P], - cache_directory: Path, - query_handler: Callable[..., pa.Table], - ttl: timedelta, - lazy: bool = False, - cache: Union[str, Path, bool] = False, - model: Union[Type["Model"], None] = None, - query_handler_kwargs: Optional[Dict[Any, Any]] = None, - ) -> None: - """ - Convert SQL string query function to polars.DataFrame function. - - Args: - query_constructor: A function that takes arbitrary arguments and returns - an SQL query string. - cache_directory: Path to directory to store parquet cache files in. - query_handler: Function used to execute SQL queries and return arrow - tables. - ttl: See Database.query for documentation. - lazy: See Database.query for documentation. - cache: See Database.query for documentation. - model: See Database.query for documentation. 
- query_handler_kwargs: Arbitrary keyword arguments forwarded to the provided - query handler. - - Raises: - ValueError: If the given path does not have a '.parquet' file extension. - """ - if not isinstance(cache, bool) and Path(cache).suffix != ".parquet": - raise ValueError("Cache paths must have the '.parquet' file extension!") - - if isinstance(cache, (Path, str)): - self._cache = cache_directory.joinpath(cache) - else: - self._cache = cache - self._query_constructor = query_constructor - self.cache_directory = cache_directory - - self._query_handler_kwargs = query_handler_kwargs or {} - # Unless explicitly specified otherwise by the end-user, we retrieve query - # results as arrow tables with column types directly supported by polars. - # Otherwise the resulting parquet files that are written to disk can not be - # lazily read with polars.scan_parquet. - self._query_handler_kwargs.setdefault("cast_to_polars_equivalent_types", True) - - # We construct the new function with the same parameter signature as - # wrapped_function, but with polars.DataFrame as the return type. 
- @wraps(query_constructor) - def cached_func(*args: P.args, **kwargs: P.kwargs) -> DF: - query = query_constructor(*args, **kwargs) - cache_path = self.cache_path(*args, **kwargs) - if cache_path and cache_path.exists(): - metadata: Dict[bytes, bytes] = pq.read_schema(cache_path).metadata or {} - - # Check if the cache file was produced by an identical SQL query - is_same_query = metadata.get(b"query") == query.encode("utf-8") - - # Check if the cache is too old to be re-used - cache_created_time = datetime.fromisoformat( - metadata.get( - b"query_start_time", b"1900-01-01T00:00:00.000000" - ).decode("utf-8") - ) - is_fresh_cache = (datetime.now() - cache_created_time) < ttl - - # Check if the cache was produced by an incompatible version - cache_version = int.from_bytes( - metadata.get( - b"cache_version", - (0).to_bytes(length=16, byteorder="little", signed=False), - ), - byteorder="little", - signed=False, - ) - is_compatible_version = cache_version >= CACHE_VERSION - - if is_same_query and is_fresh_cache and is_compatible_version: - if lazy: - return pl.scan_parquet(cache_path) # type: ignore - else: - return pl.read_parquet(cache_path) # type: ignore - - arrow_table = query_handler(query, **self._query_handler_kwargs) - if cache_path: - cache_path.parent.mkdir(parents=True, exist_ok=True) - # We write the cache *before* any potential model validation since - # we don't want to lose the result of an expensive query just because - # the model specification is wrong. - # We also use pyarrow.parquet.write_table instead of - # polars.write_parquet since we want to write the arrow table's metadata - # to the parquet file, such as the executed query, time, etc.. - # This metadata is not preserved by polars. 
- metadata = arrow_table.schema.metadata - metadata[ - b"wrapped_function_name" - ] = self._query_constructor.__name__.encode("utf-8") - # Store the cache version as an 16-bit unsigned little-endian number - metadata[b"cache_version"] = CACHE_VERSION.to_bytes( - length=16, - byteorder="little", - signed=False, - ) - pq.write_table( - table=arrow_table.replace_schema_metadata(metadata), - where=cache_path, - # In order to support nanosecond-resolution timestamps, we must - # use parquet version >= 2.6. - version="2.6", - ) - - polars_df = cast(pl.DataFrame, pl.from_arrow(arrow_table)) - if model: - model.validate(polars_df) - - if lazy: - if cache_path: - # Delete in-memory representation of data and read from the new - # parquet file instead. That way we get consistent memory pressure - # the first and subsequent times this function is invoked. - del polars_df, arrow_table - return pl.scan_parquet(source=cache_path) # type: ignore - else: - return polars_df.lazy() # type: ignore - else: - return polars_df # type: ignore - - self._cached_func = cached_func - - def cache_path(self, *args: P.args, **kwargs: P.kwargs) -> Optional[Path]: - """ - Return the path to the parquet cache that would store the result of the query. - - Args: - args: The positional arguments passed to the wrapped function. - kwargs: The keyword arguments passed to the wrapped function. - - Returns: - A deterministic path to a parquet cache. None if caching is disabled. 
- """ - # We convert args+kwargs to kwargs-only and use it to format the string - function_signature = inspect.signature(self._query_constructor) - bound_arguments = function_signature.bind(*args, **kwargs) - - if isinstance(self._cache, Path): - # Interpret relative paths relative to the main query cache directory - return Path(str(self._cache).format(**bound_arguments.arguments)) - elif self._cache is True: - directory: Path = self.cache_directory / self._query_constructor.__name__ - directory.mkdir(exist_ok=True, parents=True) - sql_query = self.query_string(*args, **kwargs) - sql_query_hash = hashlib.sha1( # noqa: S324,S303 - sql_query.encode("utf-8") - ).hexdigest() - return directory / f"{sql_query_hash}.parquet" - else: - return None - - def __call__(self, *args: P.args, **kwargs: P.kwargs) -> DF: # noqa: D102 - return self._cached_func(*args, **kwargs) - - def query_string(self, *args: P.args, **kwargs: P.kwargs) -> str: - """ - Return the query to be executed for the given parameters. - - Args: - *args: Positional arguments used to construct the query string. - *kwargs: Keyword arguments used to construct the query string. - - Returns: - The query string produced for the given input parameters. - """ - return self._query_constructor(*args, **kwargs) - - def refresh_cache(self, *args: P.args, **kwargs: P.kwargs) -> DF: - """ - Force query execution by refreshing the cache. - - Args: - *args: Positional arguments used to construct the SQL query string. - *kwargs: Keyword arguments used to construct the SQL query string. - - Returns: - A DataFrame representing the result of the newly executed query. 
- """ - cache_path = self.cache_path(*args, **kwargs) - if cache_path and cache_path.exists(): - cache_path.unlink() - return self._cached_func(*args, **kwargs) - - def clear_caches(self) -> None: - """Delete all parquet cache files produced by this query wrapper.""" - if self._cache is False: - # Caching is not enabled, so this is simply a no-op - return - - if self._cache is True: - glob_pattern = str( - self.cache_directory / self._query_constructor.__name__ / "*.parquet" - ) - else: - # We replace all formatting specifiers of the form '{variable}' with - # recursive globs '**' (in case strings containing '/' are inserted) and - # search for all occurrences of such file paths. - # For example if cache="{a}/{b}.parquet" is specified, we search for - # all files matching the glob pattern '**/**.parquet'. - glob_pattern = re.sub( # noqa: PD005 - # We specify the reluctant qualifier (?) in order to get narrow matches - pattern=r"\{.+?\}", - repl="**", - string=str(self._cache), - ) - - for parquet_path in glob.iglob(glob_pattern): - try: - metadata: Dict[bytes, bytes] = ( - pq.read_schema(where=parquet_path).metadata or {} - ) - if metadata.get( - b"wrapped_function_name" - ) == self._query_constructor.__name__.encode("utf-8"): - Path(parquet_path).unlink() - except Exception: # noqa: S112 - # If we can't read the parquet metadata for some reason, - # it is probably not a cache anyway. - continue - - -class Database: - """ - Construct manager for executing SQL queries and caching the results. - - Args: - query_handler: The function that the Database object should use for executing - SQL queries. Its first argument should be the SQL query string to execute, - and it should return the query result as an arrow table, for instance - pyarrow.Table. - cache_directory: Path to the directory where caches should be stored as parquet - files. 
If not provided, the `XDG`_ Base Directory Specification will be - used to determine the suitable cache directory, by default - ``~/.cache/patito`` or ``${XDG_CACHE_HOME}/patito``. - default_ttl: The default Time To Live (TTL), or with other words, how long to - wait until caches are refreshed due to old age. The given default TTL can be - overwritten by specifying the ``ttl`` parameter in - :func:`Database.query`. The default is 52 weeks. - - Examples: - We start by importing the necessary modules: - - >>> from pathlib import Path - ... - >>> import patito as pt - >>> import pyarrow as pa - - In order to construct a ``Database``, we need to provide the constructor with - a function that can *execute* query strings. How to construct this function will - depend on what you actually want to run your queries against, for example a - local or remote database. For the purposes of demonstration we will use - SQLite since it is built into Python's standard library, but you can use - anything; for example Snowflake or PostgresQL. - - We will use Python's standard library - `documentation `_ - to create an in-memory SQLite database. - It will contain a single table named ``movies`` containing some dummy data. - The details do not really matter here, the only important part is that we - construct a database which we can run SQL queries against. - - >>> import sqlite3 - ... - >>> def dummy_database() -> sqlite3.Cursor: - ... connection = sqlite3.connect(":memory:") - ... cursor = connection.cursor() - ... cursor.execute("CREATE TABLE movies(title, year, score)") - ... data = [ - ... ("Monty Python Live at the Hollywood Bowl", 1982, 7.9), - ... ("Monty Python's The Meaning of Life", 1983, 7.5), - ... ("Monty Python's Life of Brian", 1979, 8.0), - ... ] - ... cursor.executemany("INSERT INTO movies VALUES(?, ?, ?)", data) - ... connection.commit() - ... 
return cursor - - Using this dummy database, we are now able to construct a function which accepts - SQL queries as its first parameter, executes the query, and returns the query - result in the form of an Arrow table. - - >>> def query_handler(query: str) -> pa.Table: - ... cursor = dummy_database() - ... cursor.execute(query) - ... columns = [description[0] for description in cursor.description] - ... data = [dict(zip(columns, row)) for row in cursor.fetchall()] - ... return pa.Table.from_pylist(data) - - We can now construct a ``Database`` object, providing ``query_handler`` - as the way to execute SQL queries. - - >>> db = pt.Database(query_handler=query_handler) - - The resulting object can now be used to execute SQL queries against the database - and return the result in the form of a polars ``DataFrame`` object. - - >>> db.query("select * from movies order by year limit 1") - shape: (1, 3) - ┌──────────────────────────────┬──────┬───────┐ - │ title ┆ year ┆ score │ - │ --- ┆ --- ┆ --- │ - │ str ┆ i64 ┆ f64 │ - ╞══════════════════════════════╪══════╪═══════╡ - │ Monty Python's Life of Brian ┆ 1979 ┆ 8.0 │ - └──────────────────────────────┴──────┴───────┘ - - But the main way to use a ``Database`` object is to use the - ``@Database.as_query`` decarator to wrap functions which return SQL - query *strings*. - - >>> @db.as_query() - >>> def movies(newer_than_year: int): - ... return f"select * from movies where year > {newer_than_year}" - - This decorator will convert the function from producing query strings, to - actually executing the given query and return the query result in the form of - a polars ``DataFrame`` object. 
- - >>> movies(newer_than_year=1980) - shape: (2, 3) - ┌───────────────────────────────────┬──────┬───────┐ - │ title ┆ year ┆ score │ - │ --- ┆ --- ┆ --- │ - │ str ┆ i64 ┆ f64 │ - ╞═══════════════════════════════════╪══════╪═══════╡ - │ Monty Python Live at the Hollywo… ┆ 1982 ┆ 7.9 │ - │ Monty Python's The Meaning of Li… ┆ 1983 ┆ 7.5 │ - └───────────────────────────────────┴──────┴───────┘ - - Caching is not enabled by default, but it can be enabled by specifying - ``cache=True`` to the ``@db.as_query(...)`` decorator. Other arguments are - also accepted, such as ``lazy=True`` if you want to retrieve the results in the - form of a ``LazyFrame`` instead of a ``DataFrame``, ``ttl`` if you want to - specify another TTL, and any additional keyword arguments are forwarded to - ``query_executor`` when the SQL query is executed. You can read more about these - parameters in the documentation of :func:`Database.query`. - - .. _XDG: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html - """ - - Query = DatabaseQuery - - def __init__( # noqa: D107 - self, - query_handler: Callable[..., pa.Table], - cache_directory: Optional[Path] = None, - default_ttl: timedelta = timedelta(weeks=52), # noqa: B008 - ) -> None: - self.query_handler = query_handler - self.cache_directory = cache_directory or xdg.cache_home(application="patito") - self.default_ttl = default_ttl - - self.cache_directory.mkdir(exist_ok=True, parents=True) - - # With lazy = False a DataFrame-producing wrapper is returned - @overload - def as_query( - self, - *, - lazy: Literal[False] = False, - cache: Union[str, Path, bool] = False, - ttl: Optional[timedelta] = None, - model: Union[Type["Model"], None] = None, - **kwargs: Any, # noqa: ANN401 - ) -> Callable[[QueryConstructor[P]], DatabaseQuery[P, pl.DataFrame]]: - ... 
# pragma: no cover - - # With lazy = True a LazyFrame-producing wrapper is returned - @overload - def as_query( - self, - *, - lazy: Literal[True], - cache: Union[str, Path, bool] = False, - ttl: Optional[timedelta] = None, - model: Union[Type["Model"], None] = None, - **kwargs: Any, # noqa: ANN401 - ) -> Callable[[QueryConstructor[P]], DatabaseQuery[P, pl.LazyFrame]]: - ... # pragma: no cover - - def as_query( - self, - *, - lazy: bool = False, - cache: Union[str, Path, bool] = False, - ttl: Optional[timedelta] = None, - model: Union[Type["Model"], None] = None, - **kwargs: Any, # noqa: ANN401 - ) -> Callable[ - [QueryConstructor[P]], DatabaseQuery[P, Union[pl.DataFrame, pl.LazyFrame]] - ]: - """ - Execute the returned query string and return a polars dataframe. - - Args: - lazy: If the result should be returned as a LazyFrame rather than a - DataFrame. Allows more efficient reading from parquet caches if caching - is enabled. - cache: If queries should be cached in order to save time and costs. - The cache will only be used if the exact same SQL string has - been executed before. - If the parameter is specified as ``True``, a parquet file is - created for each unique query string, and is located at: - artifacts/query_cache//.parquet - If the a string or ``pathlib.Path`` object is provided, the given path - will be used, but it must have a '.parquet' file extension. - Relative paths are interpreted relative to artifacts/query_cache/ - in the workspace root. The given parquet path will be overwritten - if the query string changes, so only the latest query string value - will be cached. - ttl: The Time to Live (TTL) of the cache specified as a datetime.timedelta - object. When the cache becomes older than the specified TTL, the query - will be re-executed on the next invocation of the query function - and the cache will refreshed. - model: An optional Patito model used to validate the content of the - dataframe before return. 
- **kwargs: Connection parameters forwarded to sql_to_polars, for example - db_params. - - Returns: - A new function which returns a polars DataFrame based on the query - specified by the original function's return string. - """ - - def wrapper(query_constructor: QueryConstructor) -> DatabaseQuery: - return self.Query( - query_constructor=query_constructor, - lazy=lazy, - cache=cache, - ttl=ttl if ttl is not None else self.default_ttl, - cache_directory=self.cache_directory, - model=model, - query_handler=_with_query_metadata(self.query_handler), - query_handler_kwargs=kwargs, - ) - - return wrapper - - # With lazy=False, a DataFrame is returned - @overload - def query( - self, - query: str, - *, - lazy: Literal[False] = False, - cache: Union[str, Path, bool] = False, - ttl: Optional[timedelta] = None, - model: Union[Type["Model"], None] = None, - **kwargs: Any, # noqa: ANN401 - ) -> pl.DataFrame: - ... # pragma: no cover - - # With lazy=True, a LazyFrame is returned - @overload - def query( - self, - query: str, - *, - lazy: Literal[True], - cache: Union[str, Path, bool] = False, - ttl: Optional[timedelta] = None, - model: Union[Type["Model"], None] = None, - **kwargs: Any, # noqa: ANN401 - ) -> pl.LazyFrame: - ... # pragma: no cover - - def query( - self, - query: str, - *, - lazy: bool = False, - cache: Union[str, Path, bool] = False, - ttl: Optional[timedelta] = None, - model: Union[Type["Model"], None] = None, - **kwargs: Any, # noqa: ANN401 - ) -> Union[pl.DataFrame, pl.LazyFrame]: - """ - Execute the given query and return the query result as a DataFrame or LazyFrame. - - See :ref:`Database.as_query` for a more powerful way to build and execute - queries. - - Args: - query: The query string to be executed, for instance an SQL query. - lazy: If the query result should be returned in the form of a LazyFrame - instead of a DataFrame. - cache: If the query result should be saved and re-used the next time the - same query is executed. 
Can also be provided as a path. See - :func:`Database.as_query` for full documentation. - ttl: How long to use cached results until the query is re-executed anyway. - model: A :ref:`Model` to optionally validate the query result. - **kwargs: All additional keyword arguments are forwarded to the query - handler which executes the given query. - - Returns: - The result of the query in the form of a ``DataFrame`` if ``lazy=False``, or - a ``LazyFrame`` otherwise. - - Examples: - We will use DuckDB as our example database. - - >>> import duckdb - >>> import patito as pt - - We will construct a really simple query source from an in-memory database. - - >>> db = duckdb.connect(":memory:") - >>> query_handler = lambda query: db.cursor().query(query).arrow() - >>> query_source = pt.Database(query_handler=query_handler) - - We can now use :func:`Database.query` in order to execute queries against - the in-memory database. - - >>> query_source.query("select 1 as a, 2 as b, 3 as c") - shape: (1, 3) - ┌─────┬─────┬─────┐ - │ a ┆ b ┆ c │ - │ --- ┆ --- ┆ --- │ - │ i32 ┆ i32 ┆ i32 │ - ╞═════╪═════╪═════╡ - │ 1 ┆ 2 ┆ 3 │ - └─────┴─────┴─────┘ - """ - - def __direct_query() -> str: - """ - A regular named function in order to store parquet files correctly. - - Returns: - The user-provided query string. - """ - return query - - return self.as_query( - lazy=lazy, # type: ignore - cache=cache, - ttl=ttl, - model=model, - **kwargs, - )(__direct_query)() - - -def _with_query_metadata(query_handler: Callable[P, pa.Table]) -> Callable[P, pa.Table]: - """ - Wrap SQL-query handler with additional logic. - - Args: - query_handler: Function accepting an SQL query as its first argument and - returning an Arrow table. - - Returns: - New function that returns Arrow table with additional metedata. Arrow types - which are not supported by polars directly have also been converted to - compatible ones where applicable. 
- """ - - @wraps(query_handler) - def wrapped_query_handler( - *args: P.args, - **kwargs: P.kwargs, - ) -> pa.Table: - cast_to_polars_equivalent_types = kwargs.pop( - "cast_to_polars_equivalent_types", True - ) - start_time = datetime.now() - arrow_table = query_handler(*args, **kwargs) - finish_time = datetime.now() - metadata: dict = arrow_table.schema.metadata or {} - if cast_to_polars_equivalent_types: - # We perform a round-trip to polars and back in order to get an arrow table - # with column types that are directly supported by polars. - arrow_table = pl.from_arrow(arrow_table).to_arrow() - - # Store additional metadata which is useful when the arrow table is written to a - # parquet file as a caching mechanism. - metadata.update( - { - "query": args[0], - "query_start_time": start_time.isoformat(), - "query_end_time": finish_time.isoformat(), - } - ) - return arrow_table.replace_schema_metadata(metadata) - - return wrapped_query_handler - - -__all__ = ["Database"] diff --git a/src/patito/duckdb.py b/src/patito/duckdb.py deleted file mode 100644 index 524916e..0000000 --- a/src/patito/duckdb.py +++ /dev/null @@ -1,2793 +0,0 @@ -""" -Module which wraps around the duckdb module in an opiniated manner. 
-""" -from __future__ import annotations - -import hashlib -from collections.abc import Collection, Iterable, Iterator -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generic, - List, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, - cast, -) - -import numpy as np -import polars as pl -import pyarrow as pa # type: ignore[import] -from pydantic import create_model -from typing_extensions import Literal - -from patito import sql -from patito.exceptions import MultipleRowsReturned, RowDoesNotExist -from patito.polars import DataFrame -from patito.pydantic import Model, ModelType - -try: - import pandas as pd - - _PANDAS_AVAILABLE = True -except ImportError: - _PANDAS_AVAILABLE = False - -if TYPE_CHECKING: - import duckdb - - -# Types which can be used to instantiate a DuckDB Relation object -RelationSource = Union[ - DataFrame, - pl.DataFrame, - "pd.DataFrame", - Path, - str, - "duckdb.DuckDBPyRelation", - "Relation", -] - -# Used to refer to type(self) in Relation methods which preserve the type. -# Hard-coding Relation or Relation[ModelType] does not work for subclasses -# that return type(self) since that will refer to the parent class. 
-# See relevant SO answer: https://stackoverflow.com/a/63178532 -RelationType = TypeVar("RelationType", bound="Relation") - -# The SQL types supported by DuckDB -# See: https://duckdb.org/docs/sql/data_types/overview -# fmt: off -DuckDBSQLType = Literal[ - "BIGINT", "INT8", "LONG", - "BLOB", "BYTEA", "BINARY", "VARBINARY", - "BOOLEAN", "BOOL", "LOGICAL", - "DATE", - "DOUBLE", "FLOAT8", "NUMERIC", "DECIMAL", - "HUGEINT", - "INTEGER", "INT4", "INT", "SIGNED", - "INTERVAL", - "REAL", "FLOAT4", "FLOAT", - "SMALLINT", "INT2", "SHORT", - "TIME", - "TIMESTAMP", "DATETIME", - "TIMESTAMP WITH TIMEZONE", "TIMESTAMPTZ", - "TINYINT", "INT1", - "UBIGINT", - "UINTEGER", - "USMALLINT", - "UTINYINT", - "UUID", - "VARCHAR", "CHAR", "BPCHAR", "TEXT", "STRING", -] -# fmt: on - -# Used for backward-compatible patches -POLARS_VERSION: Optional[Tuple[int, int, int]] -try: - POLARS_VERSION = cast( - Tuple[int, int, int], - tuple(map(int, pl.__version__.split("."))), - ) -except ValueError: # pragma: no cover - POLARS_VERSION = None - - -def create_pydantic_model(relation: "duckdb.DuckDBPyRelation") -> Type[Model]: - """Create pydantic model deserialization of the given relation.""" - pydantic_annotations = {column: (Any, ...) for column in relation.columns} - return create_model( # type: ignore - relation.alias, - __base__=Model, - **pydantic_annotations, # pyright: ignore - ) - - -def _enum_type_name(field_properties: dict) -> str: - """ - Return enum DuckDB SQL type name based on enum values. - - The same enum values, regardless of ordering, will always be given the same name. - """ - enum_values = ", ".join(repr(value) for value in sorted(field_properties["enum"])) - value_hash = hashlib.md5(enum_values.encode("utf-8")).hexdigest() # noqa: #S303 - return f"enum__{value_hash}" - - -def _is_missing_enum_type_exception(exception: BaseException) -> bool: - """ - Return True if the given exception might be caused by missing enum type definitions. 
- - Args: - exception: Exception raised by DuckDB. - - Returns: - True if the exception might be caused by a missing SQL enum type definition. - """ - description = str(exception) - # DuckDB version <= 0.3.4 - old_exception = description.startswith("Not implemented Error: DataType") - # DuckDB version >= 0.4.0 - new_exception = description.startswith("Catalog Error: Type with name enum_") - return old_exception or new_exception - - -class Relation(Generic[ModelType]): - # The database connection which the given relation belongs to - database: Database - - # The underlying DuckDB relation object which this class wraps around - _relation: duckdb.DuckDBPyRelation - - # Can be set by subclasses in order to specify the serialization class for rows. - # Must accept column names as keyword arguments. - model: Optional[Type[ModelType]] = None - - # The alias that can be used to refer to the relation in queries - alias: str - - def __init__( # noqa: C901 - self, - derived_from: RelationSource, - database: Optional[Database] = None, - model: Optional[Type[ModelType]] = None, - ) -> None: - """ - Create a new relation object containing data to be queried with DuckDB. - - Args: - derived_from: Data to be represented as a DuckDB relation object. - Can be one of the following types: - - - A pandas or polars DataFrame. - - An SQL query represented as a string. - - A ``Path`` object pointing to a CSV or a parquet file. - The path must point to an existing file with either a ``.csv`` - or ``.parquet`` file extension. - - A native DuckDB relation object (``duckdb.DuckDBPyRelation``). - - A ``patito.duckdb.Relation`` object. - - database: Which database to load the relation into. If not provided, - the default DuckDB database will be used. - - model: Sub-class of ``patito.Model`` which specifies how to deserialize rows - when fetched with methods such as - :ref:`Relation.get()` and ``__iter__()``. 
- - Will also be used to create a strict table schema if - :ref:`Relation.create_table()`. - schema should be constructed. - - If not provided, a dynamic model fitting the relation schema will be - created when required. - - Can also be set later dynamically by invoking - :ref:`Relation.set_model()`. - - Raises: - ValueError: If any one of the following cases are encountered: - - - If a provided ``Path`` object does not have a ``.csv`` or - ``.parquet`` file extension. - - If a database and relation object is provided, but the relation object - does not belong to the database. - - TypeError: If the type of ``derived_from`` is not supported. - - Examples: - Instantiated from a dataframe: - - >>> import patito as pt - >>> df = pt.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}) - >>> pt.duckdb.Relation(df).filter("a > 2").to_df() - shape: (1, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 3 ┆ 6 │ - └─────┴─────┘ - - Instantiated from an SQL query: - - >>> pt.duckdb.Relation("select 1 as a, 2 as b").to_df() - shape: (1, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 2 │ - └─────┴─────┘ - """ - import duckdb - - if isinstance(derived_from, Relation): - if ( - database is not None - and derived_from.database.connection is not database.connection - ): - raise ValueError( - "Relations can't be casted between database connections." - ) - self.database = derived_from.database - self._relation = derived_from._relation - self.model = derived_from.model - return - - if database is None: - self.database = Database.default() - else: - self.database = database - - if isinstance(derived_from, duckdb.DuckDBPyRelation): - relation = derived_from - elif isinstance(derived_from, str): - relation = self.database.connection.from_query(derived_from) - elif _PANDAS_AVAILABLE and isinstance(derived_from, pd.DataFrame): - # We must replace pd.NA with np.nan in order for it to be considered - # as null by DuckDB. 
Otherwise it will casted to the string - # or even segfault. - derived_from = derived_from.fillna(np.nan) - relation = self.database.connection.from_df(derived_from) - elif isinstance(derived_from, pl.DataFrame): - relation = self.database.connection.from_arrow(derived_from.to_arrow()) - elif isinstance(derived_from, Path): - if derived_from.suffix.lower() == ".parquet": - relation = self.database.connection.from_parquet(str(derived_from)) - elif derived_from.suffix.lower() == ".csv": - relation = self.database.connection.from_csv_auto(str(derived_from)) - else: - raise ValueError( - f"Unsupported file suffix {derived_from.suffix!r} for data import!" - ) - else: - raise TypeError # pragma: no cover - - self._relation = relation - if model is not None: - self.model = model # pyright: ignore - - def aggregate( - self, - *aggregations: str, - group_by: Union[str, Iterable[str]], - **named_aggregations: str, - ) -> Relation: - """ - Return relation formed by ``GROUP BY`` SQL aggregation(s). - - Args: - aggregations: Zero or more aggregation expressions such as - "sum(column_name)" and "count(distinct column_name)". - named_aggregations: Zero or more aggregated expressions where the keyword is - used to name the given aggregation. For example, - ``my_column="sum(column_name)"`` is inserted as - ``"sum(column_name) as my_column"`` in the executed SQL query. - group_by: A single column name or iterable collection of column names to - group by. - - Examples: - >>> import patito as pt - >>> df = pt.DataFrame({"a": [1, 2, 3], "b": ["X", "Y", "X"]}) - >>> relation = pt.duckdb.Relation(df) - >>> relation.aggregate( - ... "b", - ... "sum(a)", - ... "greatest(b)", - ... max_a="max(a)", - ... group_by="b", - ... 
).to_df() - shape: (2, 4) - ┌─────┬────────┬─────────────┬───────┐ - │ b ┆ sum(a) ┆ greatest(b) ┆ max_a │ - │ --- ┆ --- ┆ --- ┆ --- │ - │ str ┆ f64 ┆ str ┆ i64 │ - ╞═════╪════════╪═════════════╪═══════╡ - │ X ┆ 4.0 ┆ X ┆ 3 │ - │ Y ┆ 2.0 ┆ Y ┆ 2 │ - └─────┴────────┴─────────────┴───────┘ - """ - expression = ", ".join( - aggregations - + tuple( - f"{expression} as {column_name}" - for column_name, expression in named_aggregations.items() - ) - ) - relation = self._relation.aggregate( - aggr_expr=expression, - group_expr=group_by if isinstance(group_by, str) else ", ".join(group_by), - ) - return self._wrap(relation=relation, schema_change=True) - - def add_suffix( - self, - suffix: str, - include: Optional[Collection[str]] = None, - exclude: Optional[Collection[str]] = None, - ) -> Relation: - """ - Add a suffix to all the columns of the relation. - - Args: - suffix: A string to append to add to all columns names. - include: If provided, only the given columns will be renamed. - exclude: If provided, the given columns will `not` be renamed. - - Raises: - TypeError: If both include `and` exclude are provided at the same time. 
- - Examples: - >>> import patito as pt - >>> relation = pt.duckdb.Relation("select 1 as column_1, 2 as column_2") - >>> relation.add_suffix("_renamed").to_df() - shape: (1, 2) - ┌──────────────────┬──────────────────┐ - │ column_1_renamed ┆ column_2_renamed │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞══════════════════╪══════════════════╡ - │ 1 ┆ 2 │ - └──────────────────┴──────────────────┘ - - >>> relation.add_suffix("_renamed", include=["column_1"]).to_df() - shape: (1, 2) - ┌──────────────────┬──────────┐ - │ column_1_renamed ┆ column_2 │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞══════════════════╪══════════╡ - │ 1 ┆ 2 │ - └──────────────────┴──────────┘ - - >>> relation.add_suffix("_renamed", exclude=["column_1"]).to_df() - shape: (1, 2) - ┌──────────┬──────────────────┐ - │ column_1 ┆ column_2_renamed │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞══════════╪══════════════════╡ - │ 1 ┆ 2 │ - └──────────┴──────────────────┘ - """ - if include is not None and exclude is not None: - raise TypeError("Both include and exclude provided at the same time!") - elif include is not None: - included = lambda column: column in include - elif exclude is not None: - included = lambda column: column not in exclude - else: - included = lambda _: True # noqa: E731 - - return self.select( - ", ".join( - f"{column} as {column}{suffix}" if included(column) else column - for column in self.columns - ) - ) - - def add_prefix( - self, - prefix: str, - include: Optional[Iterable[str]] = None, - exclude: Optional[Iterable[str]] = None, - ) -> Relation: - """ - Add a prefix to all the columns of the relation. - - Args: - prefix: A string to prepend to add to all the columns names. - include: If provided, only the given columns will be renamed. - exclude: If provided, the given columns will `not` be renamed. - - Raises: - TypeError: If both include `and` exclude are provided at the same time. 
- - Examples: - >>> import patito as pt - >>> relation = pt.duckdb.Relation("select 1 as column_1, 2 as column_2") - >>> relation.add_prefix("renamed_").to_df() - shape: (1, 2) - ┌──────────────────┬──────────────────┐ - │ renamed_column_1 ┆ renamed_column_2 │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞══════════════════╪══════════════════╡ - │ 1 ┆ 2 │ - └──────────────────┴──────────────────┘ - - >>> relation.add_prefix("renamed_", include=["column_1"]).to_df() - shape: (1, 2) - ┌──────────────────┬──────────┐ - │ renamed_column_1 ┆ column_2 │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞══════════════════╪══════════╡ - │ 1 ┆ 2 │ - └──────────────────┴──────────┘ - - >>> relation.add_prefix("renamed_", exclude=["column_1"]).to_df() - shape: (1, 2) - ┌──────────┬──────────────────┐ - │ column_1 ┆ renamed_column_2 │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞══════════╪══════════════════╡ - │ 1 ┆ 2 │ - └──────────┴──────────────────┘ - """ - if include is not None and exclude is not None: - raise TypeError("Both include and exclude provided at the same time!") - elif include is not None: - included = lambda column: column in include - elif exclude is not None: - included = lambda column: column not in exclude - else: - included = lambda _: True - - return self.select( - ", ".join( - f"{column} as {prefix}{column}" if included(column) else column - for column in self.columns - ) - ) - - def all(self, *filters: str, **equalities: Union[int, float, str]) -> bool: - """ - Return ``True`` if the given predicate(s) are true for all rows in the relation. - - See :func:`Relation.filter()` for additional information regarding the - parameters. - - Args: - filters: SQL predicates to satisfy. - equalities: SQL equality predicates to satisfy. - - Examples: - >>> import patito as pt - >>> df = pt.DataFrame( - ... { - ... "even_number": [2, 4, 6], - ... "odd_number": [1, 3, 5], - ... "zero": [0, 0, 0], - ... } - ... 
) - >>> relation = pt.duckdb.Relation(df) - >>> relation.all(zero=0) - True - >>> relation.all( - ... "even_number % 2 = 0", - ... "odd_number % 2 = 1", - ... zero=0, - ... ) - True - >>> relation.all(zero=1) - False - >>> relation.all("odd_number % 2 = 0") - False - """ - return self.filter(*filters, **equalities).count() == self.count() - - def case( - self, - *, - from_column: str, - to_column: str, - mapping: Dict[sql.SQLLiteral, sql.SQLLiteral], - default: sql.SQLLiteral, - ) -> Relation: - """ - Map values of one column over to a new column. - - Args: - from_column: Name of column defining the domain of the mapping. - to_column: Name of column to insert the mapped values into. - mapping: Dictionary defining the mapping. The dictionary keys represent the - input values, while the dictionary values represent the output values. - Items are inserted into the SQL case statement by their repr() string - value. - default: Default output value for inputs which have no provided mapping. - - Examples: - The following case statement... - - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> relation = db.to_relation("select 1 as a union select 2 as a") - >>> relation.case( - ... from_column="a", - ... to_column="b", - ... mapping={1: "one", 2: "two"}, - ... default="three", - ... ).order(by="a").to_df() - shape: (2, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ str │ - ╞═════╪═════╡ - │ 1 ┆ one │ - │ 2 ┆ two │ - └─────┴─────┘ - - ... is equivalent with: - - >>> case_statement = pt.sql.Case( - ... on_column="a", - ... mapping={1: "one", 2: "two"}, - ... default="three", - ... as_column="b", - ... 
) - >>> relation.select(f"*, {case_statement}").order(by="a").to_df() - shape: (2, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ str │ - ╞═════╪═════╡ - │ 1 ┆ one │ - │ 2 ┆ two │ - └─────┴─────┘ - """ - - case_statement = sql.Case( - on_column=from_column, - mapping=mapping, - default=default, - as_column=to_column, - ) - new_relation = self._relation.project(f"*, {case_statement}") - return self._wrap(relation=new_relation, schema_change=True) - - def cast( - self: RelationType, - model: Optional[ModelType] = None, - strict: bool = False, - include: Optional[Collection[str]] = None, - exclude: Optional[Collection[str]] = None, - ) -> RelationType: - """ - Cast the columns of the relation to types compatible with the associated model. - - The associated model must either be set by invoking - :ref:`Relation.set_model() ` or provided with the - ``model`` parameter. - - Any columns of the relation that are not part of the given model schema will be - left as-is. - - Args: - model: If :ref:`Relation.set_model() ` has not - been invoked or is intended to be overwritten. - strict: If set to ``False``, columns which are technically compliant with - the specified field type, will not be casted. For example, a column - annotated with ``int`` is technically compliant with ``SMALLINT``, even - if ``INTEGER`` is the default SQL type associated with ``int``-annotated - fields. If ``strict`` is set to ``True``, the resulting dtypes will - be forced to the default dtype associated with each python type. - include: If provided, only the given columns will be casted. - exclude: If provided, the given columns will `not` be casted. - - Returns: - New relation where the columns have been casted according to the model - schema. - - Examples: - >>> import patito as pt - >>> class Schema(pt.Model): - ... float_column: float - ... 
- >>> relation = pt.duckdb.Relation("select 1 as float_column") - >>> relation.types["float_column"] - INTEGER - >>> relation.cast(model=Schema).types["float_column"] - DOUBLE - - >>> relation = pt.duckdb.Relation("select 1::FLOAT as float_column") - >>> relation.cast(model=Schema).types["float_column"] - FLOAT - >>> relation.cast(model=Schema, strict=True).types["float_column"] - DOUBLE - - >>> class Schema(pt.Model): - ... column_1: float - ... column_2: float - ... - >>> relation = pt.duckdb.Relation( - ... "select 1 as column_1, 2 as column_2" - ... ).set_model(Schema) - >>> relation.types - {'column_1': INTEGER, 'column_2': INTEGER} - >>> relation.cast(include=["column_1"]).types - {'column_1': DOUBLE, 'column_2': INTEGER} - >>> relation.cast(exclude=["column_1"]).types - {'column_1': INTEGER, 'column_2': DOUBLE} - """ - if model is not None: - relation = self.set_model(model) - schema = model - elif self.model is not None: - relation = self - schema = cast(ModelType, self.model) - else: - class_name = self.__class__.__name__ - raise TypeError( - f"{class_name}.cast() invoked without " - f"{class_name}.model having been set! " - f"You should invoke {class_name}.set_model() first " - "or explicitly provide a model to .cast()." - ) - - if include is not None and exclude is not None: - raise ValueError( - "Both include and exclude provided to " - f"{self.__class__.__name__}.cast()!" 
- ) - elif include is not None: - include = set(include) - elif exclude is not None: - include = set(relation.columns) - set(exclude) - else: - include = set(relation.columns) - - new_columns = [] - for column, current_type in relation.types.items(): - if column not in schema.columns: - new_columns.append(column) - elif column in include and ( - strict or current_type not in schema.valid_sql_types[column] - ): - new_type = schema.sql_types[column] - new_columns.append(f"{column}::{new_type} as {column}") - else: - new_columns.append(column) - return cast(RelationType, self.select(*new_columns)) - - def coalesce( - self: RelationType, - **column_expressions: Union[str, int, float], - ) -> RelationType: - """ - Replace null-values in given columns with respective values. - - For example, ``coalesce(column_name=value)`` is compiled to: - ``f"coalesce({column_name}, {repr(value)}) as column_name"`` in the resulting - SQL. - - Args: - column_expressions: Keywords indicate which columns to coalesce, while the - string representation of the respective arguments are used as the - null-replacement. - - Return: - Relation: Relation where values have been filled in for nulls in the given - columns. - - Examples: - >>> import patito as pt - >>> df = pt.DataFrame( - ... { - ... "a": [1, None, 3], - ... "b": ["four", "five", None], - ... "c": [None, 8.0, 9.0], - ... } - ... 
) - >>> relation = pt.duckdb.Relation(df) - >>> relation.coalesce(a=2, b="six").to_df() - shape: (3, 3) - ┌─────┬──────┬──────┐ - │ a ┆ b ┆ c │ - │ --- ┆ --- ┆ --- │ - │ i64 ┆ str ┆ f64 │ - ╞═════╪══════╪══════╡ - │ 1 ┆ four ┆ null │ - │ 2 ┆ five ┆ 8.0 │ - │ 3 ┆ six ┆ 9.0 │ - └─────┴──────┴──────┘ - """ - projections = [] - for column in self.columns: - if column in column_expressions: - expression = column_expressions[column] - projections.append(f"coalesce({column}, {expression!r}) as {column}") - else: - projections.append(column) - return cast(RelationType, self.select(*projections)) - - @property - def columns(self) -> List[str]: - """ - Return the columns of the relation as a list of strings. - - Examples: - >>> import patito as pt - >>> pt.duckdb.Relation("select 1 as a, 2 as b").columns - ['a', 'b'] - """ - # Under certain specific circumstances columns are suffixed with - # :1, which need to be removed from the column name. - return [column.partition(":")[0] for column in self._relation.columns] - - def count(self) -> int: - """ - Return the number of rows in the given relation. - - Returns: - Number of rows in the relation as an integer. - - Examples: - >>> import patito as pt - >>> relation = pt.duckdb.Relation("select 1 as a") - >>> relation.count() - 1 - >>> (relation + relation).count() - 2 - - The :ref:`Relation.__len__()` method invokes - ``Relation.count()`` under the hood, and is equivalent: - - >>> len(relation) - 1 - >>> len(relation + relation) - 2 - """ - return cast(Tuple[int], self._relation.aggregate("count(*)").fetchone())[0] - - def create_table(self: RelationType, name: str) -> RelationType: - """ - Create new database table based on relation. - - If ``self.model`` is set with - :ref:`Relation.set_model()`, then the model is used - to infer the table schema. Otherwise, a permissive table schema is created based - on the relation data. - - Returns: - Relation: A relation pointing to the newly created table. 
- - Examples: - >>> from typing import Literal - >>> import patito as pt - - >>> df = pt.DataFrame({"enum_column": ["A", "A", "B"]}) - >>> relation = pt.duckdb.Relation(df) - >>> relation.create_table("permissive_table").types - {'enum_column': VARCHAR} - - >>> class TableSchema(pt.Model): - ... enum_column: Literal["A", "B", "C"] - ... - >>> relation.set_model(TableSchema).create_table("strict_table").types - {'enum_column': enum__7ba49365cc1b0fd57e61088b3bc9aa25} - """ - if self.model is not None: - self.database.create_table(name=name, model=self.model) - self.insert_into(table=name) - else: - self._relation.create(table_name=name) - return cast(RelationType, self.database.table(name)) - - def create_view( - self: RelationType, - name: str, - replace: bool = False, - ) -> RelationType: - """ - Create new database view based on relation. - - Returns: - Relation: A relation pointing to the newly created view. - - Examples: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> df = pt.DataFrame({"column": ["A", "A", "B"]}) - >>> relation = db.to_relation(df) - >>> relation.create_view("my_view") - >>> db.query("select * from my_view").to_df() - shape: (3, 1) - ┌────────┐ - │ column │ - │ --- │ - │ str │ - ╞════════╡ - │ A │ - │ A │ - │ B │ - └────────┘ - """ - self._relation.create_view(view_name=name, replace=replace) - return cast(RelationType, self.database.view(name)) - - def drop(self, *columns: str) -> Relation: - """ - Remove specified column(s) from relation. - - Args: - columns (str): Any number of string column names to be dropped. 
- - Examples: - >>> import patito as pt - >>> relation = pt.duckdb.Relation("select 1 as a, 2 as b, 3 as c") - >>> relation.columns - ['a', 'b', 'c'] - >>> relation.drop("c").columns - ['a', 'b'] - >>> relation.drop("b", "c").columns - ['a'] - """ - new_columns = self.columns.copy() - for column in columns: - new_columns.remove(column) - return self[new_columns] - - def distinct(self: RelationType) -> RelationType: - """ - Drop all duplicate rows of the relation. - - Example: - >>> import patito as pt - >>> df = pt.DataFrame( - ... [[1, 2, 3], [1, 2, 3], [3, 2, 1]], - ... schema=["a", "b", "c"], - ... orient="row", - ... ) - >>> relation = pt.duckdb.Relation(df) - >>> relation.to_df() - shape: (3, 3) - ┌─────┬─────┬─────┐ - │ a ┆ b ┆ c │ - │ --- ┆ --- ┆ --- │ - │ i64 ┆ i64 ┆ i64 │ - ╞═════╪═════╪═════╡ - │ 1 ┆ 2 ┆ 3 │ - │ 1 ┆ 2 ┆ 3 │ - │ 3 ┆ 2 ┆ 1 │ - └─────┴─────┴─────┘ - >>> relation.distinct().to_df() - shape: (2, 3) - ┌─────┬─────┬─────┐ - │ a ┆ b ┆ c │ - │ --- ┆ --- ┆ --- │ - │ i64 ┆ i64 ┆ i64 │ - ╞═════╪═════╪═════╡ - │ 1 ┆ 2 ┆ 3 │ - │ 3 ┆ 2 ┆ 1 │ - └─────┴─────┴─────┘ - """ - return self._wrap(self._relation.distinct(), schema_change=False) - - def except_(self: RelationType, other: RelationSource) -> RelationType: - """ - Remove all rows that can be found in the other other relation. - - Args: - other: Another relation or something that can be casted to a relation. - - Returns: - New relation without the rows that can be found in the other relation. - - Example: - >>> import patito as pt - >>> relation_123 = pt.duckdb.Relation( - ... "select 1 union select 2 union select 3" - ... 
) - >>> relation_123.order(by="1").to_df() - shape: (3, 1) - ┌─────┐ - │ 1 │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - │ 2 │ - │ 3 │ - └─────┘ - >>> relation_2 = pt.duckdb.Relation("select 2") - >>> relation_2.to_df() - shape: (1, 1) - ┌─────┐ - │ 2 │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 2 │ - └─────┘ - >>> relation_123.except_(relation_2).order(by="1").to_df() - shape: (2, 1) - ┌─────┐ - │ 1 │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - │ 3 │ - └─────┘ - """ - return self._wrap( - self._relation.except_(self.database.to_relation(other)._relation), - schema_change=False, - ) - - def execute(self) -> duckdb.DuckDBPyRelation: - """ - Execute built relation query and return result object. - - Returns: - A native ``duckdb.DuckDBPyResult`` object representing the executed query. - - Examples: - >>> import patito as pt - >>> relation = pt.duckdb.Relation( - ... "select 1 as a, 2 as b union select 3 as a, 4 as b" - ... ) - >>> result = relation.aggregate("sum(a)", group_by="").execute() - >>> result.description - [('sum(a)', 'NUMBER', None, None, None, None, None)] - >>> result.fetchall() - [(4,)] - """ - # A star-select is here performed in order to work around certain DuckDB bugs - return self._relation.project("*").execute() - - def get(self, *filters: str, **equalities: Union[str, int, float]) -> ModelType: - """ - Fetch the single row that matches the given filter(s). - - If you expect a relation to already return one row, you can use get() without - any arguments to return that row. - - Raises: - RuntimeError: RuntimeError is thrown if not exactly one single row matches - the given filter. - - Args: - filters (str): A conjunction of SQL where clauses. - equalities (Any): A conjunction of SQL equality clauses. The keyword name - is the column and the parameter is the value of the equality. - - Returns: - Model: A Patito model representing the given row. 
- - Examples: - >>> import patito as pt - >>> import polars as pl - >>> df = pt.DataFrame({"product_id": [1, 2, 3], "price": [10, 10, 20]}) - >>> relation = pt.duckdb.Relation(df).set_alias("my_relation") - - The ``.get()`` method will by default return a dynamically constructed - Patito model if no model has been associated with the given relation: - - >>> relation.get(product_id=1) - my_relation(product_id=1, price=10) - - If a Patito model has been associated with the relation, by the use of - :ref:`Relation.set_model()`, then the given model - will be used to represent the return type: - - >>> class Product(pt.Model): - ... product_id: int = pt.Field(unique=True) - ... price: float - ... - >>> relation.set_model(Product).get(product_id=1) - Product(product_id=1, price=10.0) - - You can invoke ``.get()`` without any arguments on relations containing - exactly one row: - - >>> relation.filter(product_id=1).get() - my_relation(product_id=1, price=10) - - If the given predicate matches multiple rows a ``MultipleRowsReturned`` - exception will be raised: - - >>> try: - ... relation.get(price=10) - ... except pt.exceptions.MultipleRowsReturned as e: - ... print(e) - ... - Relation.get(price=10) returned 2 rows! - - If the given predicate matches zero rows a ``RowDoesNotExist`` exception - will be raised: - - >>> try: - ... relation.get(price=0) - ... except pt.exceptions.RowDoesNotExist as e: - ... print(e) - ... - Relation.get(price=0) returned 0 rows! 
- """ - if filters or equalities: - relation = self.filter(*filters, **equalities) - else: - relation = self - result = relation.execute() - row = result.fetchone() - if row is None or result.fetchone() is not None: - args = [repr(f) for f in filters] - args.extend(f"{key}={value!r}" for key, value in equalities.items()) - args_string = ",".join(args) - - num_rows = relation.count() - if num_rows == 0: - raise RowDoesNotExist(f"Relation.get({args_string}) returned 0 rows!") - else: - raise MultipleRowsReturned( - f"Relation.get({args_string}) returned {num_rows} rows!" - ) - return self._to_model(row=row) - - def _to_model(self, row: tuple) -> ModelType: - """ - Cast row tuple to proper return type. - - If self.model is set, either by a class variable of a subclass or by the - invocation of Relation.set_model(), that type is used to construct the return - value. Otherwise, a pydantic model is dynamically created based on the column - schema of the relation. - """ - kwargs = {column: value for column, value in zip(self.columns, row)} - if self.model: - return self.model(**kwargs) - else: - RowModel = create_pydantic_model(relation=self._relation) - return cast( - ModelType, - RowModel(**kwargs), - ) - - def filter( - self: RelationType, - *filters: str, - **equalities: Union[str, int, float], - ) -> RelationType: - """ - Return subset of rows of relation that satisfy the given predicates. - - The method returns self if no filters are provided. - - Args: - filters: A conjunction of SQL ``WHERE`` clauses. - equalities: A conjunction of SQL equality clauses. The keyword name - is the column and the parameter is the value of the equality. - - Returns: - Relation: A new relation where all rows satisfy the given criteria. - - Examples: - >>> import patito as pt - >>> df = pt.DataFrame( - ... { - ... "number": [1, 2, 3, 4], - ... "string": ["A", "A", "B", "B"], - ... } - ... 
) - >>> relation = pt.duckdb.Relation(df) - >>> relation.filter("number % 2 = 0").to_df() - shape: (2, 2) - ┌────────┬────────┐ - │ number ┆ string │ - │ --- ┆ --- │ - │ i64 ┆ str │ - ╞════════╪════════╡ - │ 2 ┆ A │ - │ 4 ┆ B │ - └────────┴────────┘ - - >>> relation.filter(number=1, string="A").to_df() - shape: (1, 2) - ┌────────┬────────┐ - │ number ┆ string │ - │ --- ┆ --- │ - │ i64 ┆ str │ - ╞════════╪════════╡ - │ 1 ┆ A │ - └────────┴────────┘ - """ - if not filters and not equalities: - return self - - clauses: List[str] = [] - if filters: - clauses.extend(filters) - if equalities: - clauses.extend(f"{key}={value!r}" for key, value in equalities.items()) - filter_string = " and ".join(f"({clause})" for clause in clauses) - return self._wrap(self._relation.filter(filter_string), schema_change=False) - - def join( - self: RelationType, - other: RelationSource, - *, - on: str, - how: Literal["inner", "left"] = "inner", - ) -> RelationType: - """ - Join relation with other relation source based on condition. - - See :ref:`duckdb.Relation.inner_join() ` and - :ref:`Relation.left_join() ` for alternative method - shortcuts instead of using ``how``. - - Args: - other: A source which can be casted to a ``Relation`` object, and be used - as the right table in the join. - on: Join condition following the ``INNER JOIN ... ON`` in the SQL query. - how: Either ``"left"`` or ``"inner"`` for what type of SQL join operation to - perform. - - Returns: - Relation: New relation based on the joined relations. - - Example: - >>> import patito as pt - >>> products_df = pt.DataFrame( - ... { - ... "product_name": ["apple", "banana", "oranges"], - ... "supplier_id": [2, 1, 3], - ... } - ... ) - >>> products = pt.duckdb.Relation(products_df) - >>> supplier_df = pt.DataFrame( - ... { - ... "id": [1, 2], - ... "supplier_name": ["Banana Republic", "Applies Inc."], - ... } - ... ) - >>> suppliers = pt.duckdb.Relation(supplier_df) - >>> products.set_alias("p").join( - ... 
suppliers.set_alias("s"), - ... on="p.supplier_id = s.id", - ... how="inner", - ... ).to_df() - shape: (2, 4) - ┌──────────────┬─────────────┬─────┬─────────────────┐ - │ product_name ┆ supplier_id ┆ id ┆ supplier_name │ - │ --- ┆ --- ┆ --- ┆ --- │ - │ str ┆ i64 ┆ i64 ┆ str │ - ╞══════════════╪═════════════╪═════╪═════════════════╡ - │ apple ┆ 2 ┆ 2 ┆ Applies Inc. │ - │ banana ┆ 1 ┆ 1 ┆ Banana Republic │ - └──────────────┴─────────────┴─────┴─────────────────┘ - - >>> products.set_alias("p").join( - ... suppliers.set_alias("s"), - ... on="p.supplier_id = s.id", - ... how="left", - ... ).to_df() - shape: (3, 4) - ┌──────────────┬─────────────┬──────┬─────────────────┐ - │ product_name ┆ supplier_id ┆ id ┆ supplier_name │ - │ --- ┆ --- ┆ --- ┆ --- │ - │ str ┆ i64 ┆ i64 ┆ str │ - ╞══════════════╪═════════════╪══════╪═════════════════╡ - │ apple ┆ 2 ┆ 2 ┆ Applies Inc. │ - │ banana ┆ 1 ┆ 1 ┆ Banana Republic │ - │ oranges ┆ 3 ┆ null ┆ null │ - └──────────────┴─────────────┴──────┴─────────────────┘ - """ - return self._wrap( - self._relation.join( - self.database.to_relation(other)._relation, condition=on, how=how - ), - schema_change=True, - ) - - def inner_join(self: RelationType, other: RelationSource, on: str) -> RelationType: - """ - Inner join relation with other relation source based on condition. - - Args: - other: A source which can be casted to a ``Relation`` object, and be used - as the right table in the join. - on: Join condition following the ``INNER JOIN ... ON`` in the SQL query. - - Returns: - Relation: New relation based on the joined relations. - - Example: - >>> import patito as pt - >>> products_df = pt.DataFrame( - ... { - ... "product_name": ["apple", "banana", "oranges"], - ... "supplier_id": [2, 1, 3], - ... } - ... ) - >>> products = pt.duckdb.Relation(products_df) - >>> supplier_df = pt.DataFrame( - ... { - ... "id": [1, 2], - ... "supplier_name": ["Banana Republic", "Applies Inc."], - ... } - ... 
) - >>> suppliers = pt.duckdb.Relation(supplier_df) - >>> products.set_alias("p").inner_join( - ... suppliers.set_alias("s"), - ... on="p.supplier_id = s.id", - ... ).to_df() - shape: (2, 4) - ┌──────────────┬─────────────┬─────┬─────────────────┐ - │ product_name ┆ supplier_id ┆ id ┆ supplier_name │ - │ --- ┆ --- ┆ --- ┆ --- │ - │ str ┆ i64 ┆ i64 ┆ str │ - ╞══════════════╪═════════════╪═════╪═════════════════╡ - │ apple ┆ 2 ┆ 2 ┆ Applies Inc. │ - │ banana ┆ 1 ┆ 1 ┆ Banana Republic │ - └──────────────┴─────────────┴─────┴─────────────────┘ - """ - return self._wrap( - self._relation.join( - other_rel=self.database.to_relation(other)._relation, - condition=on, - how="inner", - ), - schema_change=True, - ) - - def left_join(self: RelationType, other: RelationSource, on: str) -> RelationType: - """ - Left join relation with other relation source based on condition. - - Args: - other: A source which can be casted to a Relation object, and be used as - the right table in the join. - on: Join condition following the ``LEFT JOIN ... ON`` in the SQL query. - - Returns: - Relation: New relation based on the joined tables. - - Example: - >>> import patito as pt - >>> products_df = pt.DataFrame( - ... { - ... "product_name": ["apple", "banana", "oranges"], - ... "supplier_id": [2, 1, 3], - ... } - ... ) - >>> products = pt.duckdb.Relation(products_df) - >>> supplier_df = pt.DataFrame( - ... { - ... "id": [1, 2], - ... "supplier_name": ["Banana Republic", "Applies Inc."], - ... } - ... ) - >>> suppliers = pt.duckdb.Relation(supplier_df) - >>> products.set_alias("p").left_join( - ... suppliers.set_alias("s"), - ... on="p.supplier_id = s.id", - ... ).to_df() - shape: (3, 4) - ┌──────────────┬─────────────┬──────┬─────────────────┐ - │ product_name ┆ supplier_id ┆ id ┆ supplier_name │ - │ --- ┆ --- ┆ --- ┆ --- │ - │ str ┆ i64 ┆ i64 ┆ str │ - ╞══════════════╪═════════════╪══════╪═════════════════╡ - │ apple ┆ 2 ┆ 2 ┆ Applies Inc. 
│ - │ banana ┆ 1 ┆ 1 ┆ Banana Republic │ - │ oranges ┆ 3 ┆ null ┆ null │ - └──────────────┴─────────────┴──────┴─────────────────┘ - """ - return self._wrap( - self._relation.join( - other_rel=self.database.to_relation(other)._relation, - condition=on, - how="left", - ), - schema_change=True, - ) - - def limit(self: RelationType, n: int, *, offset: int = 0) -> RelationType: - """ - Remove all but the first n rows. - - Args: - n: The number of rows to keep. - offset: Disregard the first ``offset`` rows before starting to count which - rows to keep. - - Returns: - New relation with only n rows. - - Example: - >>> import patito as pt - >>> relation = ( - ... pt.duckdb.Relation("select 1 as column") - ... + pt.duckdb.Relation("select 2 as column") - ... + pt.duckdb.Relation("select 3 as column") - ... + pt.duckdb.Relation("select 4 as column") - ... ) - >>> relation.limit(2).to_df() - shape: (2, 1) - ┌────────┐ - │ column │ - │ --- │ - │ i64 │ - ╞════════╡ - │ 1 │ - │ 2 │ - └────────┘ - >>> relation.limit(2, offset=2).to_df() - shape: (2, 1) - ┌────────┐ - │ column │ - │ --- │ - │ i64 │ - ╞════════╡ - │ 3 │ - │ 4 │ - └────────┘ - """ - return self._wrap(self._relation.limit(n=n, offset=offset), schema_change=False) - - def order(self: RelationType, by: Union[str, Iterable[str]]) -> RelationType: - """ - Change the order of the rows of the relation. - - Args: - by: An ``ORDER BY`` SQL expression such as ``"age DESC"`` or - ``("age DESC", "name ASC")``. - - Returns: - New relation where the rows have been ordered according to ``by``. - - Example: - >>> import patito as pt - >>> df = pt.DataFrame( - ... { - ... "name": ["Alice", "Bob", "Charles", "Diana"], - ... "age": [20, 20, 30, 35], - ... } - ... 
) - >>> df - shape: (4, 2) - ┌─────────┬─────┐ - │ name ┆ age │ - │ --- ┆ --- │ - │ str ┆ i64 │ - ╞═════════╪═════╡ - │ Alice ┆ 20 │ - │ Bob ┆ 20 │ - │ Charles ┆ 30 │ - │ Diana ┆ 35 │ - └─────────┴─────┘ - >>> relation = pt.duckdb.Relation(df) - >>> relation.order(by="age desc").to_df() - shape: (4, 2) - ┌─────────┬─────┐ - │ name ┆ age │ - │ --- ┆ --- │ - │ str ┆ i64 │ - ╞═════════╪═════╡ - │ Diana ┆ 35 │ - │ Charles ┆ 30 │ - │ Alice ┆ 20 │ - │ Bob ┆ 20 │ - └─────────┴─────┘ - >>> relation.order(by=["age desc", "name desc"]).to_df() - shape: (4, 2) - ┌─────────┬─────┐ - │ name ┆ age │ - │ --- ┆ --- │ - │ str ┆ i64 │ - ╞═════════╪═════╡ - │ Diana ┆ 35 │ - │ Charles ┆ 30 │ - │ Bob ┆ 20 │ - │ Alice ┆ 20 │ - └─────────┴─────┘ - """ - order_expr = by if isinstance(by, str) else ", ".join(by) - return self._wrap( - self._relation.order(order_expr=order_expr), - schema_change=False, - ) - - def insert_into( - self: RelationType, - table: str, - ) -> RelationType: - """ - Insert all rows of the relation into a given table. - - The relation must contain all the columns present in the target table. - Extra columns are ignored and the column order is automatically matched - with the target table. - - Args: - table: Name of table for which to insert values into. - - Returns: - Relation: The original relation, i.e. ``self``. 
- - Examples: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> db.to_relation("select 1 as a").create_table("my_table") - >>> db.table("my_table").to_df() - shape: (1, 1) - ┌─────┐ - │ a │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - └─────┘ - >>> db.to_relation("select 2 as a").insert_into("my_table") - >>> db.table("my_table").to_df() - shape: (2, 1) - ┌─────┐ - │ a │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - │ 2 │ - └─────┘ - """ - table_relation = self.database.table(table) - missing_columns = set(table_relation.columns) - set(self.columns) - if missing_columns: - raise TypeError( - f"Relation is missing column(s) {missing_columns} " - f"in order to be inserted into table '{table}'!", - ) - - reordered_relation = self[table_relation.columns] - reordered_relation._relation.insert_into(table_name=table) - return self - - def intersect(self: RelationType, other: RelationSource) -> RelationType: - """ - Return a new relation containing the rows that are present in both relations. - - This is a set operation which will remove duplicate rows as well. - - Args: - other: Another relation with the same column names. - - Returns: - Relation[Model]: A new relation with only those rows that are present in - both relations. - - Example: - >>> import patito as pt - >>> df1 = pt.DataFrame({"a": [1, 1, 2], "b": [1, 1, 2]}) - >>> df2 = pt.DataFrame({"a": [1, 1, 3], "b": [1, 1, 3]}) - >>> pt.duckdb.Relation(df1).intersect(pt.duckdb.Relation(df2)).to_df() - shape: (1, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 1 │ - └─────┴─────┘ - """ - other = self.database.to_relation(other) - return self._wrap( - self._relation.intersect(other._relation), - schema_change=False, - ) - - def select( - self, - *projections: Union[str, int, float], - **named_projections: Union[str, int, float], - ) -> Relation: - """ - Return relation based on one or more SQL ``SELECT`` projections. 
- - Keyword arguments are converted into ``{arg} as {keyword}`` in the executed SQL - query. - - Args: - *projections: One or more strings representing SQL statements to be - selected. For example ``"2"`` or ``"another_column"``. - **named_projections: One ore more keyword arguments where the keyword - specifies the name of the new column and the value is an SQL statement - defining the content of the new column. For example - ``new_column="2 * another_column"``. - - Examples: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> relation = db.to_relation(pt.DataFrame({"original_column": [1, 2, 3]})) - >>> relation.select("*").to_df() - shape: (3, 1) - ┌─────────────────┐ - │ original_column │ - │ --- │ - │ i64 │ - ╞═════════════════╡ - │ 1 │ - │ 2 │ - │ 3 │ - └─────────────────┘ - >>> relation.select("*", multiplied_column="2 * original_column").to_df() - shape: (3, 2) - ┌─────────────────┬───────────────────┐ - │ original_column ┆ multiplied_column │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════════════════╪═══════════════════╡ - │ 1 ┆ 2 │ - │ 2 ┆ 4 │ - │ 3 ┆ 6 │ - └─────────────────┴───────────────────┘ - """ - # We expand '*' to an explicit list of columns in order to support redefining - # columns within the star expressed columns. 
- expanded_projections: list = list(projections) - try: - star_index = projections.index("*") - if named_projections: - # Allow explicitly named projections to overwrite star-selected columns - expanded_projections[star_index : star_index + 1] = [ - column for column in self.columns if column not in named_projections - ] - else: - expanded_projections[star_index : star_index + 1] = self.columns - except ValueError: - pass - - projection = ", ".join( - expanded_projections - + list( # pyright: ignore - f"{expression} as {column_name}" - for column_name, expression in named_projections.items() - ) - ) - try: - relation = self._relation.project(projection) - except RuntimeError as exc: # pragma: no cover - # We might get a RunTime error if the enum type has not - # been created yet. If so, we create all enum types for - # this model. - if self.model is not None and _is_missing_enum_type_exception(exc): - self.database.create_enum_types(model=self.model) - relation = self._relation.project(projection) - else: - raise exc - return self._wrap(relation=relation, schema_change=True) - - def rename(self, **columns: str) -> Relation: - """ - Rename columns as specified. - - Args: - **columns: A set of keyword arguments where the keyword is the old column - name and the value is the new column name. - - Raises: - ValueError: If any of the given keywords do not exist as columns in the - relation. - - Examples: - >>> import patito as pt - >>> relation = pt.duckdb.Relation("select 1 as a, 2 as b") - >>> relation.rename(b="c").to_df().select(["a", "c"]) - shape: (1, 2) - ┌─────┬─────┐ - │ a ┆ c │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 2 │ - └─────┴─────┘ - """ - existing_columns = set(self.columns) - missing = set(columns.keys()) - set(existing_columns) - if missing: - raise ValueError( - f"Column '{missing.pop()}' can not be renamed as it does not exist. " - f"The columns of the relation are: {', '.join(existing_columns)}." 
- ) - # If we rename a column to overwrite another existing one, the column should - # be overwritten. - existing_columns = set(existing_columns) - set(columns.values()) - relation = self._relation.project( - ", ".join( - f"{column} as {columns.get(column, column)}" - for column in existing_columns - ) - ) - return self._wrap(relation=relation, schema_change=True) - - def set_alias(self: RelationType, name: str) -> RelationType: - """ - Set SQL alias for the given relation to be used in further queries. - - Args: - name: The new alias for the given relation. - - Returns: - Relation: A new relation containing the same query but addressable with the - new alias. - - Example: - >>> import patito as pt - >>> relation_1 = pt.duckdb.Relation("select 1 as a, 2 as b") - >>> relation_2 = pt.duckdb.Relation("select 1 as a, 3 as c") - >>> relation_1.set_alias("x").inner_join( - ... relation_2.set_alias("y"), - ... on="x.a = y.a", - ... ).select("x.a", "y.a", "b", "c").to_df() - shape: (1, 4) - ┌─────┬─────┬─────┬─────┐ - │ a ┆ a:1 ┆ b ┆ c │ - │ --- ┆ --- ┆ --- ┆ --- │ - │ i64 ┆ i64 ┆ i64 ┆ i64 │ - ╞═════╪═════╪═════╪═════╡ - │ 1 ┆ 1 ┆ 2 ┆ 3 │ - └─────┴─────┴─────┴─────┘ - """ - return self._wrap( - self._relation.set_alias(name), - schema_change=False, - ) - - def set_model(self, model): # type: ignore[no-untyped-def] # noqa: ANN - """ - Associate a give Patito model with the relation. - - The returned relation has an associated ``.model`` attribute which can in turn - be used by several methods such as :ref:`Relation.get()`, - :ref:`Relation.create_table()`, and - :ref:`Relation.__iter__`. - - Args: - model: A Patito Model class specifying the intended schema of the relation. - - Returns: - Relation[model]: A new relation with the associated model. - - Example: - >>> from typing import Literal - >>> import patito as pt - >>> class MySchema(pt.Model): - ... float_column: float - ... enum_column: Literal["A", "B", "C"] - ... - >>> relation = pt.duckdb.Relation( - ... 
"select 1 as float_column, 'A' as enum_column" - ... ) - >>> relation.get() - query_relation(float_column=1, enum_column='A') - >>> relation.set_model(MySchema).get() - MySchema(float_column=1.0, enum_column='A') - >>> relation.create_table("unmodeled_table").types - {'float_column': INTEGER, 'enum_column': VARCHAR} - >>> relation.set_model(MySchema).create_table("modeled_table").types - {'float_column': DOUBLE, - 'enum_column': enum__7ba49365cc1b0fd57e61088b3bc9aa25} - """ - # We are not able to annotate the generic instance of type(self)[type(model)] - # due to the lack of higher-kinded generics in python as of this writing. - # See: https://github.com/python/typing/issues/548 - # This cast() will be wrong for sub-classes of Relation... - return cast( - Relation[model], - type(self)( - derived_from=self._relation, - database=self.database, - model=model, - ), - ) - - @property - def types(self): # type: ignore[no-untyped-def] # noqa - """ - Return the SQL types of all the columns of the given relation. - - Returns: - dict[str, str]: A dictionary where the keys are the column names and the - values are SQL types as strings. - - Examples: - >>> import patito as pt - >>> pt.duckdb.Relation("select 1 as a, 'my_value' as b").types - {'a': INTEGER, 'b': VARCHAR} - """ - return dict(zip(self.columns, self._relation.types)) - - def to_pandas(self) -> "pd.DataFrame": - """ - Return a pandas DataFrame representation of relation object. - - Returns: A ``pandas.DataFrame`` object containing all the data of the relation. - - Example: - >>> import patito as pt - >>> pt.duckdb.Relation("select 1 as column union select 2 as column").order( - ... by="1" - ... ).to_pandas() - column - 0 1 - 1 2 - """ - return self._relation.to_df() - - def to_df(self) -> DataFrame: - """ - Return a polars DataFrame representation of relation object. - - Returns: A ``patito.DataFrame`` object which inherits from ``polars.DataFrame``. 
- - Example: - >>> import patito as pt - >>> pt.duckdb.Relation("select 1 as column union select 2 as column").order( - ... by="1" - ... ).to_df() - shape: (2, 1) - ┌────────┐ - │ column │ - │ --- │ - │ i64 │ - ╞════════╡ - │ 1 │ - │ 2 │ - └────────┘ - """ - # Here we do a star-select to work around certain weird issues with DuckDB - self._relation = self._relation.project("*") - arrow_table = cast(pa.lib.Table, self._relation.to_arrow_table()) - try: - # We cast `INTEGER`-typed columns to `pl.Int64` when converting to Polars - # because polars is much more eager to store integer Series as 64-bit - # integers. Otherwise there must be done a lot of manual casting whenever - # you cross the boundary between DuckDB and polars. - return DataFrame._from_arrow(arrow_table).with_columns( - pl.col(pl.Int32).cast(pl.Int64) - ) - except (pa.ArrowInvalid, pl.ArrowError): # pragma: no cover - # Empty relations with enum columns can sometimes produce errors. - # As a last-ditch effort, we convert such columns to VARCHAR. - casted_columns = [ - f"{field.name}::VARCHAR as {field.name}" - if isinstance(field.type, pa.DictionaryType) - else field.name - for field in arrow_table.schema - ] - non_enum_relation = self._relation.project(", ".join(casted_columns)) - arrow_table = non_enum_relation.to_arrow_table() - return DataFrame._from_arrow(arrow_table).with_columns( - pl.col(pl.Int32).cast(pl.Int64) - ) - - def to_series(self) -> pl.Series: - """ - Convert the given relation to a polars Series. - - Raises: - TypeError: If the given relation does not contain exactly one column. - - Returns: A ``polars.Series`` object containing the data of the relation. 
- - Example: - >>> import patito as pt - >>> relation = pt.duckdb.Relation("select 1 as a union select 2 as a") - >>> relation.order(by="a").to_series() - shape: (2,) - Series: 'a' [i32] - [ - 1 - 2 - ] - """ - if len(self._relation.columns) != 1: - raise TypeError( - f"{self.__class__.__name__}.to_series() was invoked on a relation with " - f"{len(self._relation.columns)} columns, while exactly 1 is required!" - ) - dataframe: DataFrame = DataFrame._from_arrow(self._relation.to_arrow_table()) - return dataframe.to_series(index=0).alias(name=self.columns[0]) - - def union(self: RelationType, other: RelationSource) -> RelationType: - """ - Produce a new relation that contains the rows of both relations. - - The ``+`` operator can also be used to union two relations. - - The two relations must have the same column names, but not necessarily in the - same order as reordering of columns is automatically performed, unlike regular - SQL. - - Duplicates are `not` dropped. - - Args: - other: A ``patito.duckdb.Relation`` object or something that can be - *casted* to ``patito.duckdb.Relation``. - See :ref:`Relation`. - - Returns: - New relation containing the rows of both ``self`` and ``other``. - - Raises: - TypeError: If the two relations do not contain the same columns. - - Examples: - >>> import patito as pt - >>> relation_1 = pt.duckdb.Relation("select 1 as a") - >>> relation_2 = pt.duckdb.Relation("select 2 as a") - >>> relation_1.union(relation_2).to_df() - shape: (2, 1) - ┌─────┐ - │ a │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - │ 2 │ - └─────┘ - - >>> (relation_1 + relation_2).to_df() - shape: (2, 1) - ┌─────┐ - │ a │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - │ 2 │ - └─────┘ - """ - other_relation = self.database.to_relation(other) - if set(self.columns) != set(other_relation.columns): - msg = "Union between relations with different column names is not allowed." 
- additional_left = set(self.columns) - set(other_relation.columns) - additional_right = set(other_relation.columns) - set(self.columns) - if additional_left: - msg += f" Additional columns in left relation: {additional_left}." - if additional_right: - msg += f" Additional columns in right relation: {additional_right}." - raise TypeError(msg) - if other_relation.columns != self.columns: - reordered_relation = other_relation[self.columns] - else: - reordered_relation = other_relation - unioned_relation = self._relation.union(reordered_relation._relation) - return self._wrap(relation=unioned_relation, schema_change=False) - - def with_columns( - self, - **named_projections: Union[str, int, float], - ) -> Relation: - """ - Return relations with additional columns. - - If the provided columns expressions already exists as a column on the relation, - the given column is overwritten. - - Args: - named_projections: A set of column expressions, where the keyword is used - as the column name, while the right-hand argument is a valid SQL - expression. - - Returns: - Relation with the given columns appended, or possibly overwritten. - - Examples: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> relation = db.to_relation("select 1 as a, 2 as b") - >>> relation.with_columns(c="a + b").to_df() - shape: (1, 3) - ┌─────┬─────┬─────┐ - │ a ┆ b ┆ c │ - │ --- ┆ --- ┆ --- │ - │ i64 ┆ i64 ┆ i64 │ - ╞═════╪═════╪═════╡ - │ 1 ┆ 2 ┆ 3 │ - └─────┴─────┴─────┘ - """ - return self.select("*", **named_projections) - - def with_missing_defaultable_columns( - self: RelationType, - include: Optional[Iterable[str]] = None, - exclude: Optional[Iterable[str]] = None, - ) -> RelationType: - """ - Add missing defaultable columns filled with the default values of correct type. - - Make sure to invoke :ref:`Relation.set_model()` with - the correct model schema before executing - ``Relation.with_missing_default_columns()``. 
- - Args: - include: If provided, only fill in default values for missing columns part - of this collection of column names. - exclude: If provided, do `not` fill in default values for missing columns - part of this collection of column names. - - Returns: - Relation: New relation where missing columns with default values according - to the schema have been filled in. - - Example: - >>> import patito as pt - >>> class MyModel(pt.Model): - ... non_default_column: int - ... another_non_default_column: int - ... default_column: int = 42 - ... another_default_column: int = 42 - ... - >>> relation = pt.duckdb.Relation( - ... "select 1 as non_default_column, 2 as default_column" - ... ) - >>> relation.to_df() - shape: (1, 2) - ┌────────────────────┬────────────────┐ - │ non_default_column ┆ default_column │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞════════════════════╪════════════════╡ - │ 1 ┆ 2 │ - └────────────────────┴────────────────┘ - >>> relation.set_model(MyModel).with_missing_defaultable_columns().to_df() - shape: (1, 3) - ┌────────────────────┬────────────────┬────────────────────────┐ - │ non_default_column ┆ default_column ┆ another_default_column │ - │ --- ┆ --- ┆ --- │ - │ i64 ┆ i64 ┆ i64 │ - ╞════════════════════╪════════════════╪════════════════════════╡ - │ 1 ┆ 2 ┆ 42 │ - └────────────────────┴────────────────┴────────────────────────┘ - """ - if self.model is None: - class_name = self.__class__.__name__ - raise TypeError( - f"{class_name}.with_missing_default_columns() invoked without " - f"{class_name}.model having been set! " - f"You should invoke {class_name}.set_model() first!" 
- ) - elif include is not None and exclude is not None: - raise TypeError("Both include and exclude provided at the same time!") - - missing_columns = set(self.model.columns) - set(self.columns) - defaultable_columns = self.model.defaults.keys() - missing_defaultable_columns = missing_columns & defaultable_columns - - if exclude is not None: - missing_defaultable_columns -= set(exclude) - elif include is not None: - missing_defaultable_columns = missing_defaultable_columns & set(include) - - projection = "*" - for column_name in missing_defaultable_columns: - sql_type = self.model.sql_types[column_name] - default_value = self.model.defaults[column_name] - projection += f", {default_value!r}::{sql_type} as {column_name}" - - try: - relation = self._relation.project(projection) - except Exception as exc: # pragma: no cover - # We might get a RunTime error if the enum type has not - # been created yet. If so, we create all enum types for - # this model. - if _is_missing_enum_type_exception(exc): - self.database.create_enum_types(model=self.model) - relation = self._relation.project(projection) - else: - raise exc - return self._wrap(relation=relation, schema_change=False) - - def with_missing_nullable_columns( - self: RelationType, - include: Optional[Iterable[str]] = None, - exclude: Optional[Iterable[str]] = None, - ) -> RelationType: - """ - Add missing nullable columns filled with correctly typed nulls. - - Make sure to invoke :ref:`Relation.set_model()` with - the correct model schema before executing - ``Relation.with_missing_nullable_columns()``. - - Args: - include: If provided, only fill in null values for missing columns part of - this collection of column names. - exclude: If provided, do `not` fill in null values for missing columns - part of this collection of column names. - - Returns: - Relation: New relation where missing nullable columns have been filled in - with null values. 
- - Example: - >>> from typing import Optional - >>> import patito as pt - >>> class MyModel(pt.Model): - ... non_nullable_column: int - ... nullable_column: Optional[int] - ... another_nullable_column: Optional[int] - ... - >>> relation = pt.duckdb.Relation("select 1 as nullable_column") - >>> relation.to_df() - shape: (1, 1) - ┌─────────────────┐ - │ nullable_column │ - │ --- │ - │ i64 │ - ╞═════════════════╡ - │ 1 │ - └─────────────────┘ - >>> relation.set_model(MyModel).with_missing_nullable_columns().to_df() - shape: (1, 2) - ┌─────────────────┬─────────────────────────┐ - │ nullable_column ┆ another_nullable_column │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════════════════╪═════════════════════════╡ - │ 1 ┆ null │ - └─────────────────┴─────────────────────────┘ - """ - if self.model is None: - class_name = self.__class__.__name__ - raise TypeError( - f"{class_name}.with_missing_nullable_columns() invoked without " - f"{class_name}.model having been set! " - f"You should invoke {class_name}.set_model() first!" - ) - elif include is not None and exclude is not None: - raise TypeError("Both include and exclude provided at the same time!") - - missing_columns = set(self.model.columns) - set(self.columns) - missing_nullable_columns = self.model.nullable_columns & missing_columns - - if exclude is not None: - missing_nullable_columns -= set(exclude) - elif include is not None: - missing_nullable_columns = missing_nullable_columns & set(include) - - projection = "*" - for missing_nullable_column in missing_nullable_columns: - sql_type = self.model.sql_types[missing_nullable_column] - projection += f", null::{sql_type} as {missing_nullable_column}" - - try: - relation = self._relation.project(projection) - except Exception as exc: # pragma: no cover - # We might get a RunTime error if the enum type has not - # been created yet. If so, we create all enum types for - # this model. 
- if _is_missing_enum_type_exception(exc): - self.database.create_enum_types(model=self.model) - relation = self._relation.project(projection) - else: - raise exc - return self._wrap(relation=relation, schema_change=False) - - def __add__(self: RelationType, other: RelationSource) -> RelationType: - """ - Execute ``self.union(other)``. - - See :ref:`Relation.union()` for full documentation. - """ - return self.union(other) - - def __eq__(self, other: object) -> bool: - """Check if Relation is equal to a Relation-able data source.""" - other_relation = self.database.to_relation(other) # type: ignore - # Check if the number of rows are equal, and then check if each row is equal. - # Use zip(self, other_relation, strict=True) when we upgrade to Python 3.10. - return self.count() == other_relation.count() and all( - row == other_row for row, other_row in zip(self, other_relation) - ) - - def __getitem__(self, key: Union[str, Iterable[str]]) -> Relation: - """ - Return Relation with selected columns. - - Uses :ref:`Relation.select()` under-the-hood in order to - perform the selection. Can technically be used to rename columns, - define derived columns, and so on, but prefer the use of Relation.select() for - such use cases. - - Args: - key: Columns to select, either a single column represented as a string, or - an iterable of strings. - - Returns: - New relation only containing the column subset specified. 
- - Example: - >>> import patito as pt - >>> relation = pt.duckdb.Relation("select 1 as a, 2 as b, 3 as c") - >>> relation.to_df() - shape: (1, 3) - ┌─────┬─────┬─────┐ - │ a ┆ b ┆ c │ - │ --- ┆ --- ┆ --- │ - │ i64 ┆ i64 ┆ i64 │ - ╞═════╪═════╪═════╡ - │ 1 ┆ 2 ┆ 3 │ - └─────┴─────┴─────┘ - >>> relation[["a", "b"]].to_df() - shape: (1, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 2 │ - └─────┴─────┘ - >>> relation["a"].to_df() - shape: (1, 1) - ┌─────┐ - │ a │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - └─────┘ - """ - projection = key if isinstance(key, str) else ", ".join(key) - return self._wrap( - relation=self._relation.project(projection), - schema_change=True, - ) - - def __iter__(self) -> Iterator[ModelType]: - """ - Iterate over rows in relation. - - If :ref:`Relation.set_model()` has been invoked - first, the given model will be used to deserialize each row. Otherwise a Patito - model is dynamically constructed which fits the schema of the relation. - - Returns: - Iterator[Model]: An iterator of patito Model objects representing each row. - - Example: - >>> from typing import Literal - >>> import patito as pt - >>> df = pt.DataFrame({"float_column": [1, 2], "enum_column": ["A", "B"]}) - >>> relation = pt.duckdb.Relation(df).set_alias("my_relation") - >>> for row in relation: - ... print(row) - ... - float_column=1 enum_column='A' - float_column=2 enum_column='B' - >>> list(relation) - [my_relation(float_column=1, enum_column='A'), - my_relation(float_column=2, enum_column='B')] - - >>> class MySchema(pt.Model): - ... float_column: float - ... enum_column: Literal["A", "B", "C"] - ... - >>> relation = relation.set_model(MySchema) - >>> for row in relation: - ... print(row) - ... 
- float_column=1.0 enum_column='A' - float_column=2.0 enum_column='B' - >>> list(relation) - [MySchema(float_column=1.0, enum_column='A'), - MySchema(float_column=2.0, enum_column='B')] - """ - result = self._relation.execute() - while True: - row_tuple = result.fetchone() - if not row_tuple: - return - else: - yield self._to_model(row_tuple) - - def __len__(self) -> int: - """ - Return the number of rows in the relation. - - See :ref:`Relation.count()` for full documentation. - """ - return self.count() - - def __str__(self) -> str: - """ - Return string representation of Relation object. - - Includes an expression tree, the result columns, and a result preview. - - Example: - >>> import patito as pt - >>> products = pt.duckdb.Relation( - ... pt.DataFrame( - ... { - ... "product_name": ["apple", "red_apple", "banana", "oranges"], - ... "supplier_id": [2, 2, 1, 3], - ... } - ... ) - ... ).set_alias("products") - >>> print(str(products)) # xdoctest: +SKIP - --------------------- - --- Relation Tree --- - --------------------- - arrow_scan(94609350519648, 140317161740928, 140317161731168, 1000000)\ - - --------------------- - -- Result Columns -- - --------------------- - - product_name (VARCHAR) - - supplier_id (BIGINT)\ - - --------------------- - -- Result Preview -- - --------------------- - product_name supplier_id - VARCHAR BIGINT - [ Rows: 4] - apple 2 - red_apple 2 - banana 1 - oranges 3 - - >>> suppliers = pt.duckdb.Relation( - ... pt.DataFrame( - ... { - ... "id": [1, 2], - ... "supplier_name": ["Banana Republic", "Applies Inc."], - ... } - ... ) - ... ).set_alias("suppliers") - >>> relation = ( - ... products.set_alias("p") - ... .inner_join( - ... suppliers.set_alias("s"), - ... on="p.supplier_id = s.id", - ... ) - ... .aggregate( - ... "supplier_name", - ... num_products="count(product_name)", - ... group_by=["supplier_id", "supplier_name"], - ... ) - ... 
) - >>> print(str(relation)) # xdoctest: +SKIP - --------------------- - --- Relation Tree --- - --------------------- - Aggregate [supplier_name, count(product_name)] - Join INNER p.supplier_id = s.id - arrow_scan(94609350519648, 140317161740928, 140317161731168, 1000000) - arrow_scan(94609436221024, 140317161740928, 140317161731168, 1000000)\ - - --------------------- - -- Result Columns -- - --------------------- - - supplier_name (VARCHAR) - - num_products (BIGINT)\ - - --------------------- - -- Result Preview -- - --------------------- - supplier_name num_products - VARCHAR BIGINT - [ Rows: 2] - Applies Inc. 2 - Banana Republic 1 - - """ - return str(self._relation) - - def _wrap( - self: RelationType, - relation: "duckdb.DuckDBPyRelation", - schema_change: bool = False, - ) -> RelationType: - """ - Wrap DuckDB Relation object in same Relation wrapper class as self. - - This will preserve the type of the relation, even for subclasses Relation. - It should therefore only be used for relations which can be considered schema- - compatible with the original relation. Otherwise set schema_change to True - in order to create a Relation base object instead. - """ - return type(self)( - derived_from=relation, - database=self.database, - model=self.model if not schema_change else None, - ) - - -class Database: - # Types created in order to represent enum strings - enum_types: Set[str] - - def __init__( - self, - path: Optional[Path] = None, - read_only: bool = False, - **kwargs: Any, # noqa: ANN401 - ) -> None: - """ - Instantiate a new DuckDB database, either persisted to disk or in-memory. - - Args: - path: Optional path to store all the data to. If ``None`` the data is - persisted in-memory only. - read_only: If the database connection should be a read-only connection. - **kwargs: Additional keywords forwarded to ``duckdb.connect()``. 
- - Examples: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> db.to_relation("select 1 as a, 2 as b").create_table("my_table") - >>> db.query("select * from my_table").to_df() - shape: (1, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 2 │ - └─────┴─────┘ - """ - import duckdb - - self.path = path - self.connection = duckdb.connect( - database=str(path) if path else ":memory:", - read_only=read_only, - **kwargs, - ) - self.enum_types: Set[str] = set() - - @classmethod - def default(cls) -> Database: - """ - Return the default DuckDB database. - - Returns: - A patito :ref:`Database` object wrapping around the given - connection. - - Example: - >>> import patito as pt - >>> db = pt.duckdb.Database.default() - >>> db.query("select 1 as a, 2 as b").to_df() - shape: (1, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 2 │ - └─────┴─────┘ - """ - import duckdb - - return cls.from_connection(duckdb.default_connection) - - @classmethod - def from_connection(cls, connection: "duckdb.DuckDBPyConnection") -> Database: - """ - Create database from native DuckDB connection object. - - Args: - connection: A native DuckDB connection object created with - ``duckdb.connect()``. - - Returns: - A :ref:`Database` object wrapping around the given - connection. - - Example: - >>> import duckdb - >>> import patito as pt - >>> connection = duckdb.connect() - >>> database = pt.duckdb.Database.from_connection(connection) - """ - obj = cls.__new__(cls) - obj.connection = connection - obj.enum_types = set() - return obj - - def to_relation( - self, - derived_from: RelationSource, - ) -> Relation: - """ - Create a new relation object based on data source. - - The given data will be represented as a relation associated with the database. - ``Database(x).to_relation(y)`` is equivalent to - ``Relation(y, database=Database(x))``. 
- - Args: - derived_from (RelationSource): One of either a polars or pandas - ``DataFrame``, a ``pathlib.Path`` to a parquet or CSV file, a SQL query - string, or an existing relation. - - Example: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> db.to_relation("select 1 as a, 2 as b").to_df() - shape: (1, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 2 │ - └─────┴─────┘ - >>> db.to_relation(pt.DataFrame({"c": [3, 4], "d": ["5", "6"]})).to_df() - shape: (2, 2) - ┌─────┬─────┐ - │ c ┆ d │ - │ --- ┆ --- │ - │ i64 ┆ str │ - ╞═════╪═════╡ - │ 3 ┆ 5 │ - │ 4 ┆ 6 │ - └─────┴─────┘ - """ - return Relation( - derived_from=derived_from, - database=self, - ) - - def execute( - self, - query: str, - *parameters: Collection[Union[str, int, float, bool]], - ) -> None: - """ - Execute SQL query in DuckDB database. - - Args: - query: A SQL statement to execute. Does `not` have to be terminated with - a semicolon (``;``). - parameters: One or more sets of parameters to insert into prepared - statements. The values are replaced in place of the question marks - (``?``) in the prepared query. - - Example: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> db.execute("create table my_table (x bigint);") - >>> db.execute("insert into my_table values (1), (2), (3)") - >>> db.table("my_table").to_df() - shape: (3, 1) - ┌─────┐ - │ x │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - │ 2 │ - │ 3 │ - └─────┘ - - Parameters can be specified when executing prepared queries. - - >>> db.execute("delete from my_table where x = ?", (2,)) - >>> db.table("my_table").to_df() - shape: (2, 1) - ┌─────┐ - │ x │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - │ 3 │ - └─────┘ - - Multiple parameter sets can be specified when executing multiple prepared - queries. - - >>> db.execute( - ... "delete from my_table where x = ?", - ... (1,), - ... (3,), - ... 
) - >>> db.table("my_table").to_df() - shape: (0, 1) - ┌─────┐ - │ x │ - │ --- │ - │ i64 │ - ╞═════╡ - └─────┘ - """ - duckdb_parameters: Union[ - Collection[Union[str, int, float, bool]], - Collection[Collection[Union[str, int, float, bool]]], - None, - ] - if parameters is None or len(parameters) == 0: - duckdb_parameters = [] - multiple_parameter_sets = False - elif len(parameters) == 1: - duckdb_parameters = parameters[0] - multiple_parameter_sets = False - else: - duckdb_parameters = parameters - multiple_parameter_sets = True - - self.connection.execute( - query=query, - parameters=duckdb_parameters, - multiple_parameter_sets=multiple_parameter_sets, - ) - - def query(self, query: str, alias: str = "query_relation") -> Relation: - """ - Execute arbitrary SQL select query and return the relation. - - Args: - query: Arbitrary SQL select query. - alias: The alias to assign to the resulting relation, to be used in further - queries. - - Returns: A relation representing the data produced by the given query. - - Example: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> relation = db.query("select 1 as a, 2 as b, 3 as c") - >>> relation.to_df() - shape: (1, 3) - ┌─────┬─────┬─────┐ - │ a ┆ b ┆ c │ - │ --- ┆ --- ┆ --- │ - │ i64 ┆ i64 ┆ i64 │ - ╞═════╪═════╪═════╡ - │ 1 ┆ 2 ┆ 3 │ - └─────┴─────┴─────┘ - - >>> relation = db.query("select 1 as a, 2 as b, 3 as c", alias="my_alias") - >>> relation.select("my_alias.a").to_df() - shape: (1, 1) - ┌─────┐ - │ a │ - │ --- │ - │ i64 │ - ╞═════╡ - │ 1 │ - └─────┘ - """ - return Relation( - self.connection.query(query=query, alias=alias), - database=self, - ) - - def empty_relation(self, schema: Type[ModelType]) -> Relation[ModelType]: - """ - Create relation with zero rows, but correct schema that matches the given model. - - Args: - schema: A patito model which specifies the column names and types of the - given relation. - - Example: - >>> import patito as pt - >>> class Schema(pt.Model): - ... 
string_column: str - ... bool_column: bool - ... - >>> db = pt.duckdb.Database() - >>> empty_relation = db.empty_relation(Schema) - >>> empty_relation.to_df() - shape: (0, 2) - ┌───────────────┬─────────────┐ - │ string_column ┆ bool_column │ - │ --- ┆ --- │ - │ str ┆ bool │ - ╞═══════════════╪═════════════╡ - └───────────────┴─────────────┘ - >>> non_empty_relation = db.query( - ... "select 'dummy' as string_column, true as bool_column" - ... ) - >>> non_empty_relation.union(empty_relation).to_df() - shape: (1, 2) - ┌───────────────┬─────────────┐ - │ string_column ┆ bool_column │ - │ --- ┆ --- │ - │ str ┆ bool │ - ╞═══════════════╪═════════════╡ - │ dummy ┆ true │ - └───────────────┴─────────────┘ - """ - return self.to_relation(schema.examples()).limit(0) - - def table(self, name: str) -> Relation: - """ - Return relation representing all the data in the given table. - - Args: - name: The name of the table. - - Example: - >>> import patito as pt - >>> df = pt.DataFrame({"a": [1, 2], "b": [3, 4]}) - >>> db = pt.duckdb.Database() - >>> relation = db.to_relation(df) - >>> relation.create_table(name="my_table") - >>> db.table("my_table").to_df() - shape: (2, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 3 │ - │ 2 ┆ 4 │ - └─────┴─────┘ - """ - return Relation( - self.connection.table(name), - database=self.from_connection(self.connection), - ) - - def view(self, name: str) -> Relation: - """ - Return relation representing all the data in the given view. - - Args: - name: The name of the view. 
- - Example: - >>> import patito as pt - >>> df = pt.DataFrame({"a": [1, 2], "b": [3, 4]}) - >>> db = pt.duckdb.Database() - >>> relation = db.to_relation(df) - >>> relation.create_view(name="my_view") - >>> db.view("my_view").to_df() - shape: (2, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ i64 │ - ╞═════╪═════╡ - │ 1 ┆ 3 │ - │ 2 ┆ 4 │ - └─────┴─────┘ - """ - return Relation( - self.connection.view(name), - database=self.from_connection(self.connection), - ) - - def create_table( - self, - name: str, - model: Type[ModelType], - ) -> Relation[ModelType]: - """ - Create table with schema matching the provided Patito model. - - See :ref:`Relation.insert_into()` for how to insert - data into the table after creation. - The :ref:`Relation.create_table()` method can also - be used to create a table from a given relation `and` insert the data at the - same time. - - Args: - name: Name of new database table. - model (Type[Model]): Patito model indicating names and types of table - columns. - Returns: - Relation[ModelType]: Relation pointing to the new table. - - Example: - >>> from typing import Optional - >>> import patito as pt - >>> class MyModel(pt.Model): - ... str_column: str - ... nullable_string_column: Optional[str] - ... 
- >>> db = pt.duckdb.Database() - >>> db.create_table(name="my_table", model=MyModel) - >>> db.table("my_table").types - {'str_column': VARCHAR, 'nullable_string_column': VARCHAR} - """ - self.create_enum_types(model=model) - schema = model.schema() - non_nullable = schema.get("required", []) - columns = [] - for column_name, sql_type in model.sql_types.items(): - column = f"{column_name} {sql_type}" - if column_name in non_nullable: - column += " not null" - columns.append(column) - self.connection.execute(f"create table {name} ({','.join(columns)})") - # TODO: Fix typing - return self.table(name).set_model(model) # pyright: ignore - - def create_enum_types(self, model: Type[ModelType]) -> None: - """ - Define SQL enum types in DuckDB database. - - Args: - model: Model for which all Literal-annotated or enum-annotated string fields - will get respective DuckDB enum types. - - Example: - >>> import patito as pt - >>> class EnumModel(pt.Model): - ... enum_column: Literal["A", "B", "C"] - ... 
- >>> db = pt.duckdb.Database() - >>> db.create_enum_types(EnumModel) - >>> db.enum_types - {'enum__7ba49365cc1b0fd57e61088b3bc9aa25'} - """ - import duckdb - - for props in model._schema_properties().values(): - if "enum" not in props or props["type"] != "string": - # DuckDB enums only support string values - continue - - enum_type_name = _enum_type_name(field_properties=props) - if enum_type_name in self.enum_types: - # This enum type has already been created - continue - - enum_values = ", ".join(repr(value) for value in sorted(props["enum"])) - try: - self.connection.execute( - f"create type {enum_type_name} as enum ({enum_values})" - ) - except duckdb.CatalogException as e: - if "already exists" not in str(e): - raise e # pragma: no cover - self.enum_types.add(enum_type_name) - - def create_view( - self, - name: str, - data: RelationSource, - ) -> Relation: - """Create a view based on the given data source.""" - return self.to_relation(derived_from=data).create_view(name) - - def __contains__(self, table: str) -> bool: - """ - Return ``True`` if the database contains a table with the given name. - - Args: - table: The name of the table to be checked for. 
- - Examples: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> "my_table" in db - False - >>> db.to_relation("select 1 as a, 2 as b").create_table(name="my_table") - >>> "my_table" in db - True - """ - try: - self.connection.table(table_name=table) - return True - except Exception: - return False diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 44120c7..2da71d4 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -5,7 +5,6 @@ import json from collections.abc import Iterable from datetime import date, datetime -from functools import cached_property from typing import ( TYPE_CHECKING, Any, @@ -25,14 +24,12 @@ ) import polars as pl -from polars.datatypes import DataType, DataTypeClass, PolarsDataType, convert +from polars.datatypes import DataType, DataTypeClass from pydantic import ( # noqa: F401 BaseModel, - ConfigDict, create_model, field_serializer, fields, - JsonDict, ) from pydantic._internal._model_construction import ( ModelMetaclass as PydanticModelMetaclass, @@ -40,13 +37,11 @@ from patito._pydantic.dtypes import ( default_polars_dtype_for_annotation, - dtype_from_string, parse_composite_dtype, valid_polars_dtypes_for_annotation, validate_annotation, validate_polars_dtype, ) -from patito._pydantic.repr import display_as_type from patito.polars import DataFrame, LazyFrame from patito.validators import validate diff --git a/tests/test_database.py b/tests/test_database.py deleted file mode 100644 index 2ac320c..0000000 --- a/tests/test_database.py +++ /dev/null @@ -1,568 +0,0 @@ -import os -import sqlite3 -from datetime import datetime, timedelta -from pathlib import Path -from tempfile import TemporaryDirectory -from typing import TYPE_CHECKING, List, Optional - -import patito as pt -import polars as pl -import pytest - -if TYPE_CHECKING: - import pyarrow as pa # type: ignore -else: - # Python 3.7 does not support pyarrow - pa = pytest.importorskip("pyarrow") - - -class LoggingQuerySource(pt.Database): - """A 
dummy query source with an associated query execution log.""" - - executed_queries: List[str] - - -@pytest.fixture() -def query_cache(tmp_path) -> LoggingQuerySource: - """ - Return dummy query cache with query execution logger. - - Args: - tmp_path: Test-specific temporary directory provided by pytest. - - Returns: - A cacher which also keeps track of the executed queries. - """ - # Keep track of the executed queries in a mutable list - executed_queries = [] - - # Unless other is specified, some dummy data is always returned - def query_handler(query, mock_data: Optional[dict] = None) -> pa.Table: - executed_queries.append(query) - data = {"column": [1, 2, 3]} if mock_data is None else mock_data - return pa.Table.from_pydict(data) - - query_cache = LoggingQuerySource( - query_handler=query_handler, - cache_directory=tmp_path, - default_ttl=timedelta(weeks=52), - ) - - # Attach the query execution log as an attribute of the query source - query_cache.executed_queries = executed_queries - return query_cache - - -@pytest.fixture -def query_source(tmpdir) -> LoggingQuerySource: - """ - A QuerySource connected to an in-memory SQLite3 database with dummy data. - - Args: - tmpdir: Test-specific temporary directory provided by pytest. - - Returns: - A query source which also keeps track of the executed queries. 
- """ - # Keep track of the executed queries in a mutable list - executed_queries = [] - - def dummy_database() -> sqlite3.Cursor: - connection = sqlite3.connect(":memory:") - cursor = connection.cursor() - cursor.execute("CREATE TABLE movies(title, year, score)") - data = [ - ("Monty Python Live at the Hollywood Bowl", 1982, 7.9), - ("Monty Python's The Meaning of Life", 1983, 7.5), - ("Monty Python's Life of Brian", 1979, 8.0), - ] - cursor.executemany("INSERT INTO movies VALUES(?, ?, ?)", data) - connection.commit() - return cursor - - def query_handler(query: str) -> pa.Table: - cursor = dummy_database() - cursor.execute(query) - executed_queries.append(query) - columns = [description[0] for description in cursor.description] - data = [dict(zip(columns, row)) for row in cursor.fetchall()] - return pa.Table.from_pylist(data) - - # Attach the query execution log as an attribute of the query source - tmp_dir = Path(tmpdir) - query_cache = LoggingQuerySource( - query_handler=query_handler, - cache_directory=tmp_dir, - ) - query_cache.executed_queries = executed_queries - return query_cache - - -def test_uncached_query(query_cache: LoggingQuerySource): - """It should not cache queries by default.""" - - @query_cache.as_query() - def products(): - return "query" - - # First time it is called we should execute the query - products() - assert query_cache.executed_queries == ["query"] - # And no cache file is created - assert not any(query_cache.cache_directory.iterdir()) - - # The next time the query is executed again - products() - assert query_cache.executed_queries == ["query", "query"] - # And still no cache file - assert not any(query_cache.cache_directory.iterdir()) - - -def test_cached_query(query_cache: LoggingQuerySource): - """It should cache queries if so parametrized.""" - - # We enable cache for the given query - @query_cache.as_query(cache=True) - def products(version: int): - return f"query {version}" - - # The cache is stored in the "products" 
sub-folder - cache_dir = query_cache.cache_directory / "products" - - # First time the query is executed - products(version=1) - assert query_cache.executed_queries == ["query 1"] - # And the result is stored in a cache file - assert len(list(cache_dir.iterdir())) == 1 - - # The next time the query is *not* executed - products(version=1) - assert query_cache.executed_queries == ["query 1"] - # And the cache file persists - assert len(list(cache_dir.iterdir())) == 1 - - # But if we change the query itself, it is executed - products(version=2) - assert query_cache.executed_queries == ["query 1", "query 2"] - # And it is cached in a separate file - assert len(list(cache_dir.iterdir())) == 2 - - # If we delete the cache file, the query is re-executed - for cache_file in cache_dir.iterdir(): - cache_file.unlink() - products(version=1) - assert query_cache.executed_queries == ["query 1", "query 2", "query 1"] - # And the cache file is rewritten - assert len(list(cache_dir.iterdir())) == 1 - - # We clear the cache with .clear_cache() - products.refresh_cache(version=1) - assert query_cache.executed_queries == ["query 1", "query 2", "query 1", "query 1"] - # We can also clear caches that have never existed - products.refresh_cache(version=3) - assert query_cache.executed_queries[-1] == "query 3" - - -def test_cached_query_with_explicit_path( - query_cache: LoggingQuerySource, - tmpdir: Path, -) -> None: - """It should cache queries in the provided path.""" - cache_path = Path(tmpdir / "name.parquet") - - # This time we specify an explicit path - @query_cache.as_query(cache=cache_path) - def products(version): - return f"query {version}" - - # At first the path does not exist - assert not cache_path.exists() - - # We then execute and cache the query - products(version=1) - assert cache_path.exists() - assert query_cache.executed_queries == ["query 1"] - - # And the next time it is reused - products(version=1) - assert query_cache.executed_queries == ["query 1"] - assert 
cache_path.exists() - - # If the query changes, it is re-executed - products(version=2) - assert query_cache.executed_queries == ["query 1", "query 2"] - - # If a non-parquet file is specified, it will raise - with pytest.raises( - ValueError, - match=r"Cache paths must have the '\.parquet' file extension\!", - ): - - @query_cache.as_query(cache=tmpdir / "name.csv") - def products(version): - return f"query {version}" - - -def test_cached_query_with_relative_path(query_cache: LoggingQuerySource) -> None: - """Relative paths should be interpreted relative to the cache directory.""" - relative_path = Path("foo/bar.parquet") - - @query_cache.as_query(cache=relative_path) - def products(): - return "query" - - products() - assert (query_cache.cache_directory / "foo" / "bar.parquet").exists() - - -def test_cached_query_with_format_string(query_cache: LoggingQuerySource) -> None: - """Strings with placeholders should be interpolated.""" - - @query_cache.as_query(cache="version-{version}.parquet") - def products(version: int): - return f"query {version}" - - # It should work for both positional arguments... - products(1) - assert (query_cache.cache_directory / "version-1.parquet").exists() - # ... and keywords - products(version=2) - assert (query_cache.cache_directory / "version-2.parquet").exists() - - -def test_cached_query_with_format_path(query_cache: LoggingQuerySource) -> None: - """Paths with placeholders should be interpolated.""" - - @query_cache.as_query( - cache=query_cache.cache_directory / "version-{version}.parquet" - ) - def products(version: int): - return f"query {version}" - - # It should work for both positional arguments... - products(1) - assert (query_cache.cache_directory / "version-1.parquet").exists() - # ... 
and keywords - products(version=2) - assert (query_cache.cache_directory / "version-2.parquet").exists() - - -def test_cache_ttl(query_cache: LoggingQuerySource, monkeypatch): - """It should automatically refresh the cache according to the TTL.""" - - # We freeze the time during the execution of this test - class FrozenDatetime: - def __init__(self, year: int, month: int, day: int) -> None: - self.frozen_time = datetime(year=year, month=month, day=day) - monkeypatch.setattr(pt.database, "datetime", self) # pyright: ignore - - def now(self): - return self.frozen_time - - @staticmethod - def fromisoformat(*args, **kwargs): - return datetime.fromisoformat(*args, **kwargs) - - # The cache should be cleared every week - @query_cache.as_query(cache=True, ttl=timedelta(weeks=1)) - def users(): - return "query" - - # The first time the query should be executed - FrozenDatetime(year=2000, month=1, day=1) - users() - assert query_cache.executed_queries == ["query"] - - # The next time it should not be executed - users() - assert query_cache.executed_queries == ["query"] - - # Even if we advance the time by one day, - # the cache should still be used. 
- FrozenDatetime(year=2000, month=1, day=2) - users() - assert query_cache.executed_queries == ["query"] - - # Then we let one week pass, and the cache should be cleared - FrozenDatetime(year=2000, month=1, day=8) - users() - assert query_cache.executed_queries == ["query", "query"] - - # But then it will be reused for another week - users() - assert query_cache.executed_queries == ["query", "query"] - - -@pytest.mark.parametrize("cache", [True, False]) -def test_lazy_query(query_cache: LoggingQuerySource, cache: bool): - """It should return a LazyFrame when specified with lazy=True.""" - - @query_cache.as_query(lazy=True, cache=cache) - def lazy(): - return "query" - - @query_cache.as_query(lazy=False, cache=cache) - def eager(): - return "query" - - # We invoke it twice, first not hitting the cache, and then hitting it - assert lazy().collect().frame_equal(eager()) - assert lazy().collect().frame_equal(eager()) - - -def test_model_query_model_validation(query_cache: LoggingQuerySource): - """It should validate the data model.""" - - class CorrectModel(pt.Model): - column: int - - @query_cache.as_query(model=CorrectModel) - def correct_data(): - return "" - - assert isinstance(correct_data(), pl.DataFrame) - - class IncorrectModel(pt.Model): - column: str - - @query_cache.as_query(model=IncorrectModel) - def incorrect_data(): - return "" - - with pytest.raises(pt.exceptions.ValidationError): - incorrect_data() - - -def test_custom_forwarding_of_parameters_to_query_function( - query_cache: LoggingQuerySource, -): - """It should forward all additional parameters to the sql_to_arrow function.""" - - # The dummy cacher accepts a "data" parameter, specifying the data to be returned - data = {"actual_data": [10, 20, 30]} - - @query_cache.as_query(mock_data=data) - def custom_data(): - return "select 1, 2, 3 as dummy_column" - - assert custom_data().frame_equal(pl.DataFrame(data)) - - # It should also work without type normalization - 
@query_cache.as_query(mock_data=data, cast_to_polars_equivalent_types=False) - def non_normalized_custom_data(): - return "select 1, 2, 3 as dummy_column" - - assert non_normalized_custom_data().frame_equal(pl.DataFrame(data)) - - -def test_clear_caches(query_cache: LoggingQuerySource): - """It should clear all cache files with .clear_all_caches().""" - - @query_cache.as_query(cache=True) - def products(version: int): - return f"query {version}" - - # The cache is stored in the "products" sub-directory - products_cache_dir = query_cache.cache_directory / "products" - - # We produce two cache files - products(version=1) - products(version=2) - assert query_cache.executed_queries == ["query 1", "query 2"] - assert len(list(products_cache_dir.iterdir())) == 2 - - # We also insert another parquet file that should *not* be deleted - dummy_parquet_path = products_cache_dir / "dummy.parquet" - pl.DataFrame().write_parquet(dummy_parquet_path) - - # And an invalid parquet file - invalid_parquet_path = products_cache_dir / "invalid.parquet" - invalid_parquet_path.write_bytes(b"invalid content") - - # We delete all caches, but not the dummy parquet file - products.clear_caches() - assert len(list(products_cache_dir.iterdir())) == 2 - assert dummy_parquet_path.exists() - assert invalid_parquet_path.exists() - - # The next time both queries need to be re-executed - products(version=1) - products(version=2) - assert query_cache.executed_queries == ["query 1", "query 2"] * 2 - assert len(list(products_cache_dir.iterdir())) == 4 - - # If caching is not enabled, clear_caches should be a NO-OP - @query_cache.as_query(cache=False) - def uncached_products(version: int): - return f"query {version}" - - uncached_products.clear_caches() - - -def test_clear_caches_with_formatted_paths(query_cache: LoggingQuerySource): - """Formatted paths should also be properly cleared.""" - # We specify another temporary cache directory to see if caches can be cleared - # irregardless of the cache 
directory's location. - tmp_dir = TemporaryDirectory() - cache_dir = Path(tmp_dir.name) - - @query_cache.as_query(cache=cache_dir / "{a}" / "{b}.parquet") - def users(a: int, b: int): - return f"query {a}.{b}" - - users(1, 1) - users(1, 2) - users(2, 1) - - assert query_cache.executed_queries == ["query 1.1", "query 1.2", "query 2.1"] - - assert {str(path.relative_to(cache_dir)) for path in cache_dir.rglob("*")} == { - # Both directories have been created - "1", - "2", - # Two cache files for a=1 - "1/1.parquet", - "1/2.parquet", - # One cache file for a=2 - "2/1.parquet", - } - - # We insert another parquet file that should *not* be cleared - pl.DataFrame().write_parquet(cache_dir / "1" / "3.parquet") - - # Only directories and non-cached files should be kept - users.clear_caches() - assert {str(path.relative_to(cache_dir)) for path in cache_dir.rglob("*")} == { - "1", - "2", - "1/3.parquet", - } - tmp_dir.cleanup() - - -def test_ejection_of_incompatible_caches(query_cache: LoggingQuerySource): - """It should clear old, incompatible caches.""" - - cache_path = query_cache.cache_directory / "my_cache.parquet" - - @query_cache.as_query(cache=cache_path) - def my_query(): - return "my query" - - # Write a parquet file without any metadata - pl.DataFrame().write_parquet(cache_path) - - # The existing parquet file without metadata should be overwritten - df = my_query() - assert not df.is_empty() - assert query_cache.executed_queries == ["my query"] - - # Now we decrement the version number of the cache in order to overwrite it - arrow_table = pa.parquet.read_table(cache_path) # noqa - metadata = arrow_table.schema.metadata - assert ( - int.from_bytes(metadata[b"cache_version"], "little") - == pt.database.CACHE_VERSION # pyright: ignore - ) - metadata[b"cache_version"] = ( - pt.database.CACHE_VERSION - 1 # pyright: ignore - ).to_bytes( - length=16, - byteorder="little", - signed=False, - ) - pa.parquet.write_table( - arrow_table.replace_schema_metadata(metadata), - 
where=cache_path, - ) - - # The query should now be re-executed - my_query() - assert query_cache.executed_queries == ["my query"] * 2 - - # Deleting the cache_version alltogether should also retrigger the query - del metadata[b"cache_version"] - pa.parquet.write_table( - arrow_table.replace_schema_metadata(metadata), - where=cache_path, - ) - my_query() - assert query_cache.executed_queries == ["my query"] * 3 - - -def test_adherence_to_xdg_directory_standard(monkeypatch, tmpdir): - """It should use XDG Cache Home when no cache directory is specified.""" - xdg_cache_home = tmpdir / ".cache" - os.environ["XDG_CACHE_HOME"] = str(xdg_cache_home) - query_source = pt.Database(query_handler=lambda query: pa.Table()) - assert query_source.cache_directory == xdg_cache_home / "patito" - - del os.environ["XDG_CACHE_HOME"] - query_source = pt.Database(query_handler=lambda query: pa.Table()) - assert query_source.cache_directory == Path("~/.cache/patito").resolve() - - -def test_invoking_query_source_directly_with_query_string( - query_source: LoggingQuerySource, -): - """It should accept SQL queries directly, not ony query constructors.""" - sql = "select * from movies" - movies = query_source.query(sql) - assert query_source.executed_queries == [sql] - assert len(list(query_source.cache_directory.iterdir())) == 0 - assert movies.height == 3 - - for _ in range(2): - query_source.query(sql, cache=True) - assert query_source.executed_queries == [sql] * 2 - assert ( - len(list((query_source.cache_directory / "__direct_query").iterdir())) == 1 - ) - - assert query_source.query(sql, lazy=True).collect().frame_equal(movies) - - -@pytest.mark.skip(reason="TODO: Future feature to implement") -def test_custom_kwarg_hashing(tmp_path): - """You should be able to hash the keyword arguments passed to the query handler.""" - - executed_queries = [] - - def query_handler(query: str, prod=False) -> pa.Table: - executed_queries.append(query) - return pa.Table.from_pydict({"column": [1, 2, 
3]}) - - def query_handler_hasher(query: str, prod: bool) -> bytes: - return bytes(prod) - - dummy_source = pt.Database( - query_handler=query_handler, - query_handler_hasher=query_handler_hasher, # pyright: ignore - cache_directory=tmp_path, - ) - - # The first time the query should be executed - sql_query = "select * from my_table" - dummy_source.query(sql_query, cache=True) - assert executed_queries == [sql_query] - assert len(list(dummy_source.cache_directory.rglob("*.parquet"))) == 1 - - # The second time the dev query has been cached - dummy_source.query(sql_query, cache=True) - assert executed_queries == [sql_query] - assert len(list(dummy_source.cache_directory.rglob("*.parquet"))) == 1 - - # The production query has never executed, so a new query is executed - dummy_source.query(sql_query, cache=True, prod=True) - assert executed_queries == [sql_query] * 2 - assert len(list(dummy_source.cache_directory.rglob("*.parquet"))) == 2 - - # Then the production query cache is used - dummy_source.query(sql_query, cache=True, prod=True) - assert executed_queries == [sql_query] * 2 - assert len(list(dummy_source.cache_directory.rglob("*.parquet"))) == 2 - - # And the dev query cache still remains - dummy_source.query(sql_query, cache=True) - assert executed_queries == [sql_query] * 2 - assert len(list(dummy_source.cache_directory.rglob("*.parquet"))) == 2 diff --git a/tests/test_dtypes.py b/tests/test_dtypes.py index 52a8260..839b392 100644 --- a/tests/test_dtypes.py +++ b/tests/test_dtypes.py @@ -10,7 +10,6 @@ DURATION_DTYPES, FLOAT_DTYPES, INTEGER_DTYPES, - PT_BASE_SUPPORTED_DTYPES, STRING_DTYPES, TIME_DTYPES, DataTypeGroup, diff --git a/tests/test_duckdb/__init__.py b/tests/test_duckdb/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/test_duckdb/test_database.py b/tests/test_duckdb/test_database.py deleted file mode 100644 index 88e670b..0000000 --- a/tests/test_duckdb/test_database.py +++ /dev/null @@ -1,276 +0,0 @@ -"""Tests for 
patito.Database.""" -import enum -from typing import Optional - -import patito as pt -import polars as pl -import pytest -from typing_extensions import Literal - -# Skip test module if DuckDB is not installed -if not pt._DUCKDB_AVAILABLE: - pytest.skip("DuckDB not installed", allow_module_level=True) - - -def test_database(tmp_path): - """Test functionality of Database class.""" - # Create a new in-memory database - db = pt.duckdb.Database() - - # Insert a simple dataframe as a new table - table_df = pl.DataFrame( - { - "column_1": [1, 2, 3], - "column_2": ["a", "b", "c"], - } - ) - db.to_relation(table_df).create_table(name="table_name_1") - - # Check that a round-trip to and from the database preserves the data - db_table = db.table("table_name_1").to_df() - assert db_table is not table_df - assert table_df.frame_equal(db_table) - - # Check that new database objects are isolated from previous ones - another_db = pt.duckdb.Database() - with pytest.raises( - Exception, - match=r"Catalog Error\: Table 'table_name_1' does not exist!", - ): - db_table = another_db.table("table_name_1") - - # Check the parquet reading functionality - parquet_path = tmp_path / "tmp.parquet" - table_df.write_parquet(str(parquet_path), compression="snappy") - new_relation = another_db.to_relation(parquet_path) - new_relation.create_table(name="parquet_table") - assert another_db.table("parquet_table").count() == 3 - - -def test_file_database(tmp_path): - """Check if the Database can be persisted to a file.""" - # Insert some data into a file-backed database - db_path = tmp_path / "tmp.db" - file_db = pt.duckdb.Database(path=db_path) - file_db.to_relation("select 1 as a, 2 as b").create_table(name="table") - before_df = file_db.table("table").to_df() - - # Delete the database - del file_db - - # And restore tha data from the file - restored_db = pt.duckdb.Database(path=db_path) - after_df = restored_db.table("table").to_df() - - # The data should still be the same - assert 
before_df.frame_equal(after_df) - - -def test_database_create_table(): - """Tests for patito.Database.create_table().""" - - # A pydantic basemodel is used to specify the table schema - # We inherit here in order to make sure that inheritance works as intended - class BaseModel(pt.Model): - int_column: int - optional_int_column: Optional[int] - str_column: str - - class Model(BaseModel): - optional_str_column: Optional[str] - bool_column: bool - optional_bool_column: Optional[bool] - enum_column: Literal["a", "b", "c"] - - # We crate the table schema - db = pt.duckdb.Database() - table = db.create_table(name="test_table", model=Model) - - # We insert some dummy data into the new table - dummy_relation = db.to_relation(Model.examples({"optional_int_column": [1, None]})) - dummy_relation.insert_into(table="test_table") - - # But we should not be able to insert null data in non-optional columns - null_relation = dummy_relation.drop("int_column").select("null as int_column, *") - with pytest.raises( - Exception, - match=("Constraint Error: NOT NULL constraint failed: test_table.int_column"), - ): - null_relation.insert_into(table="test_table") - - # Check if the correct columns and types have been set - assert table.columns == [ - "int_column", - "optional_int_column", - "str_column", - "optional_str_column", - "bool_column", - "optional_bool_column", - "enum_column", - ] - assert list(table.types.values()) == [ - "INTEGER", - "INTEGER", - "VARCHAR", - "VARCHAR", - "BOOLEAN", - "BOOLEAN", - pt.duckdb._enum_type_name( # pyright: ignore - field_properties=Model.model_json_schema()["properties"]["enum_column"] - ), - ] - - -def test_create_view(): - """It should be able to create a view from a relation source.""" - db = pt.duckdb.Database() - df = pt.DataFrame({"a": [1, 2], "b": [3.0, 4.0]}) - db.create_view(name="my_view", data=df) - assert db.view("my_view").to_df().frame_equal(df) - - -def test_validate_non_nullable_enum_columns(): - """Enum columns should be 
null-validated.""" - - class EnumModel(pt.Model): - non_nullable_enum_column: Literal["a", "b", "c"] - nullable_enum_column: Optional[Literal["a", "b", "c"]] - - db = pt.duckdb.Database() - db.create_table(name="enum_table", model=EnumModel) - - # We allow null values in nullable_enum_column - valid_relation = db.to_relation( - "select 'a' as non_nullable_enum_column, null as nullable_enum_column" - ) - valid_relation.insert_into("enum_table") - - # But we do not allow it in non_nullable_enum_column - invalid_relation = db.to_relation( - "select null as non_nullable_enum_column, 'a' as nullable_enum_column" - ) - with pytest.raises( - Exception, - match=( - "Constraint Error: " - "NOT NULL constraint failed: " - "enum_table.non_nullable_enum_column" - ), - ): - invalid_relation.insert_into(table="enum_table") - - # The non-nullable enum column should do enum value validation - invalid_relation = db.to_relation( - "select 'd' as non_nullable_enum_column, 'a' as nullable_enum_column" - ) - with pytest.raises( - Exception, - match="Conversion Error: Could not convert string 'd' to UINT8", - ): - invalid_relation.insert_into(table="enum_table") - - # And the nullable enum column should do enum value validation - invalid_relation = db.to_relation( - "select 'a' as non_nullable_enum_column, 'd' as nullable_enum_column" - ) - with pytest.raises( - Exception, - match="Conversion Error: Could not convert string 'd' to UINT8", - ): - invalid_relation.insert_into(table="enum_table") - - -def test_table_existence_check(): - """You should be able to check for the existence of a table.""" - - class Model(pt.Model): - column_1: str - column_2: int - - # At first there is no table named "test_table" - db = pt.duckdb.Database() - assert "test_table" not in db - - # We create the table - db.create_table(name="test_table", model=Model) - - # And now the table should exist - assert "test_table" in db - - -def test_creating_enums_several_tiems(): - """Enums should be able to be defined 
several times.""" - - class EnumModel(pt.Model): - enum_column: Literal["a", "b", "c"] - - db = pt.duckdb.Database() - db.create_enum_types(EnumModel) - db.enum_types = set() - db.create_enum_types(EnumModel) - - -def test_use_of_same_enum_types_from_literal_annotation(): - """Identical literals should get the same DuckDB SQL enum type.""" - - class Table1(pt.Model): - column_1: Literal["a", "b"] - - class Table2(pt.Model): - column_2: Optional[Literal["b", "a"]] - - db = pt.duckdb.Database() - db.create_table(name="table_1", model=Table1) - db.create_table(name="table_2", model=Table2) - - assert ( - db.table("table_1").types["column_1"] == db.table("table_2").types["column_2"] - ) - - -def test_use_of_same_enum_types_from_enum_annotation(): - """Identical enums should get the same DuckDB SQL enum type.""" - - class ABEnum(enum.Enum): - ONE = "a" - TWO = "b" - - class BAEnum(enum.Enum): - TWO = "b" - ONE = "a" - - class Table1(pt.Model): - column_1: ABEnum - - class Table2(pt.Model): - column_2: Optional[BAEnum] - - db = pt.duckdb.Database() - db.create_table(name="table_1", model=Table1) - db.create_table(name="table_2", model=Table2) - - assert ( - db.table("table_1").types["column_1"] == db.table("table_2").types["column_2"] - ) - - -def test_execute(): - """It should be able to execute prepared statements.""" - db = pt.duckdb.Database() - db.execute("create table my_table (a int, b int, c int)") - db.execute("insert into my_table select ? as a, ? as b, ? as c", [2, 3, 4]) - assert ( - db.table("my_table") - .to_df() - .frame_equal(pt.DataFrame({"a": [2], "b": [3], "c": [4]})) - ) - db.execute( - "insert into my_table select ? as a, ? as b, ? 
as c", - [5, 6, 7], - [8, 9, 10], - ) - assert ( - db.table("my_table") - .to_df() - .frame_equal(pt.DataFrame({"a": [2, 5, 8], "b": [3, 6, 9], "c": [4, 7, 10]})) - ) diff --git a/tests/test_duckdb/test_relation.py b/tests/test_duckdb/test_relation.py deleted file mode 100644 index 60d8acb..0000000 --- a/tests/test_duckdb/test_relation.py +++ /dev/null @@ -1,1063 +0,0 @@ -import re -from datetime import date, timedelta -from pathlib import Path -from typing import Optional -from unittest.mock import MagicMock - -import patito as pt -import polars as pl -import pytest -from typing_extensions import Literal - -# Skip test module if DuckDB is not installed -if not pt._DUCKDB_AVAILABLE: - pytest.skip("DuckDB not installed", allow_module_level=True) - - -def test_relation(): - """Test functionality of Relation class.""" - # Create a new in-memory database with dummy data - db = pt.duckdb.Database() - table_df = pl.DataFrame( - { - "column_1": [1, 2, 3], - "column_2": ["a", "b", "c"], - } - ) - db.to_relation(table_df).create_table(name="table_name") - table_relation = db.table("table_name") - - # A projection can be done in several different ways - assert table_relation.select("column_1", "column_2") == table_relation.select( - "column_1, column_2" - ) - assert ( - table_relation.select("column_1, column_2") - == table_relation[["column_1, column_2"]] - ) - assert table_relation[["column_1, column_2"]] == table_relation - assert table_relation.select("column_1") != table_relation.select("column_2") - - # We can also use kewyrod arguments to rename columns - assert tuple(table_relation.select(column_3="column_1::varchar || column_2")) == ( - {"column_3": "1a"}, - {"column_3": "2b"}, - {"column_3": "3c"}, - ) - - # The .get() method should only work if the filter matches a single row - assert table_relation.get(column_1=1).column_2 == "a" - - # But raise if not exactly one matching row is found - with pytest.raises(RuntimeError, match="Relation.get(.*) returned 0 rows!"): 
- assert table_relation.get("column_1 = 4") - with pytest.raises(RuntimeError, match="Relation.get(.*) returned 2 rows!"): - assert table_relation.get("column_1 > 1") - - # The .get() should also accept a positional string - assert table_relation.get("column_1 < 2").column_2 == "a" - - # And several positional strings - assert table_relation.get("column_1 > 1", "column_1 < 3").column_2 == "b" - - # And a mix of positional and keyword arguments - assert table_relation.get("column_1 < 2", column_2="a").column_2 == "a" - - # Order by statements shoud be respected when iterating over the relation - assert tuple(table_relation.order("column_1 desc")) == ( - {"column_1": 3, "column_2": "c"}, - {"column_1": 2, "column_2": "b"}, - {"column_1": 1, "column_2": "a"}, - ) - - # The plus operator acts as a union all - assert ( - db.to_relation(table_df[:1]) - + db.to_relation(table_df[1:2]) - + db.to_relation(table_df[2:]) - ) == db.to_relation(table_df) - - # The union all must *not* remove duplicates - assert db.to_relation(table_df) + db.to_relation(table_df) != db.to_relation( - table_df - ) - assert db.to_relation(table_df) + db.to_relation(table_df) == db.to_relation( - pl.concat([table_df, table_df]) - ) - - # You should be able to subscript columns - assert table_relation["column_1"] == table_relation.select("column_1") - assert table_relation[["column_1", "column_2"]] == table_relation - - # The relation's columns can be retrieved - assert table_relation.columns == ["column_1", "column_2"] - - # You should be able to prefix and suffix all columns of a relation - assert table_relation.add_prefix("prefix_").columns == [ - "prefix_column_1", - "prefix_column_2", - ] - assert table_relation.add_suffix("_suffix").columns == [ - "column_1_suffix", - "column_2_suffix", - ] - - # You can drop one or more columns - assert table_relation.drop("column_1").columns == ["column_2"] - assert table_relation.select("*, 1 as column_3").drop( - "column_1", "column_2" - ).columns == 
["column_3"] - - # You can rename columns - assert set(table_relation.rename(column_1="new_name").columns) == { - "new_name", - "column_2", - } - - # A value error must be raised if the source column does not exist - with pytest.raises( - ValueError, - match=( - "Column 'a' can not be renamed as it does not exist. " - "The columns of the relation are: column_[12], column_[12]" - ), - ): - table_relation.rename(a="new_name") - - # Null values should be correctly handled - none_df = pl.DataFrame({"column_1": [1, None]}) - none_relation = db.to_relation(none_df) - assert none_relation.filter("column_1 is null") == none_df.filter( - pl.col("column_1").is_null() - ) - - # The .inner_join() method should work as INNER JOIN, not LEFT or OUTER JOIN - left_relation = db.to_relation( - pl.DataFrame( - { - "left_primary_key": [1, 2], - "left_foreign_key": [10, 20], - } - ) - ) - right_relation = db.to_relation( - pl.DataFrame( - { - "right_primary_key": [10], - } - ) - ) - joined_table = pl.DataFrame( - { - "left_primary_key": [1], - "left_foreign_key": [10], - "right_primary_key": [10], - } - ) - assert ( - left_relation.set_alias("l").inner_join( - right_relation.set_alias("r"), - on="l.left_foreign_key = r.right_primary_key", - ) - == joined_table - ) - - # But the .left_join() method performs a LEFT JOIN - left_joined_table = pl.DataFrame( - { - "left_primary_key": [1, 2], - "left_foreign_key": [10, 20], - "right_primary_key": [10, None], - } - ) - assert ( - left_relation.set_alias("l").left_join( - right_relation.set_alias("r"), - on="l.left_foreign_key = r.right_primary_key", - ) - == left_joined_table - ) - - -def test_star_select(): - """It should select all columns with star.""" - df = pt.DataFrame({"a": [1, 2], "b": [3, 4]}) - relation = pt.duckdb.Relation(df) - assert relation.select("*") == relation - - -def test_casting_relations_between_database_connections(): - """It should raise when you try to mix databases.""" - db_1 = pt.duckdb.Database() - relation_1 = 
db_1.query("select 1 as a") - db_2 = pt.duckdb.Database() - relation_2 = db_2.query("select 1 as a") - with pytest.raises( - ValueError, - match="Relations can't be casted between database connections.", - ): - relation_1 + relation_2 # pyright: ignore - - -def test_creating_relation_from_pandas_df(): - """It should be able to create a relation from a pandas dataframe.""" - pd = pytest.importorskip("pandas") - pandas_df = pd.DataFrame({"a": [1, 2]}) - relation = pt.duckdb.Relation(pandas_df) - pd.testing.assert_frame_equal(relation.to_pandas(), pandas_df) - - -def test_creating_relation_from_a_csv_file(tmp_path): - """It should be able to create a relation from a CSV path.""" - df = pl.DataFrame({"a": [1, 2]}) - csv_path = tmp_path / "test.csv" - df.write_csv(csv_path) - relation = pt.duckdb.Relation(csv_path) - assert relation.to_df().frame_equal(df) - - -def test_creating_relation_from_a_parquet_file(tmp_path): - """It should be able to create a relation from a parquet path.""" - df = pl.DataFrame({"a": [1, 2]}) - parquet_path = tmp_path / "test.parquet" - df.write_parquet(parquet_path, compression="uncompressed") - relation = pt.duckdb.Relation(parquet_path) - assert relation.to_df().frame_equal(df) - - -def test_creating_relation_from_a_unknown_file_format(tmp_path): - """It should raise when you try to create relation from unknown path.""" - with pytest.raises( - ValueError, - match="Unsupported file suffix '.unknown' for data import!", - ): - pt.duckdb.Relation(Path("test.unknown")) - - with pytest.raises( - ValueError, - match="Unsupported file suffix '' for data import!", - ): - pt.duckdb.Relation(Path("test")) - - -def test_relation_with_default_database(): - """It should be constructable with the default DuckDB cursor.""" - import duckdb - - relation_a = pt.duckdb.Relation("select 1 as a") - assert relation_a.database.connection is duckdb.default_connection - - relation_a.create_view("table_a") - del relation_a - - relation_b = pt.duckdb.Relation("select 
1 as b") - relation_b.create_view("table_b") - del relation_b - - default_database = pt.duckdb.Database.default() - joined_relation = default_database.query( - """ - select * - from table_a - inner join table_b - on a = b - """ - ) - assert joined_relation.to_df().frame_equal(pl.DataFrame({"a": [1], "b": [1]})) - - -def test_with_columns(): - """It should be able to crate new additional columns.""" - db = pt.duckdb.Database() - relation = db.to_relation("select 1 as a, 2 as b") - - # We can define a new column - extended_relation = relation.with_columns(c="a + b") - correct_extended = pl.DataFrame({"a": [1], "b": [2], "c": [3]}) - assert extended_relation.to_df().frame_equal(correct_extended) - - # Or even overwrite an existing column - overwritten_relation = relation.with_columns(a="a + b") - correct_overwritten = db.to_relation("select 2 as b, 3 as a").to_df() - assert overwritten_relation.to_df().frame_equal(correct_overwritten) - - -def test_rename_to_existing_column(): - """Renaming a column to overwrite another should work.""" - db = pt.duckdb.Database() - relation = db.to_relation("select 1 as a, 2 as b") - renamed_relation = relation.rename(b="a") - assert renamed_relation.columns == ["a"] - assert renamed_relation.get().a == 2 - - -def test_add_suffix(): - """It should be able to add suffixes to all column names.""" - db = pt.duckdb.Database() - relation = db.to_relation("select 1 as a, 2 as b") - assert relation.add_suffix("x").columns == ["ax", "bx"] - assert relation.add_suffix("x", exclude=["a"]).columns == ["a", "bx"] - assert relation.add_suffix("x", include=["a"]).columns == ["ax", "b"] - - with pytest.raises( - TypeError, - match="Both include and exclude provided at the same time!", - ): - relation.add_suffix("x", exclude=["a"], include=["b"]) - - -def test_add_prefix(): - """It should be able to add prefixes to all column names.""" - db = pt.duckdb.Database() - relation = db.to_relation("select 1 as a, 2 as b") - assert 
relation.add_prefix("x").columns == ["xa", "xb"] - assert relation.add_prefix("x", exclude=["a"]).columns == ["a", "xb"] - assert relation.add_prefix("x", include=["a"]).columns == ["xa", "b"] - - with pytest.raises( - TypeError, - match="Both include and exclude provided at the same time!", - ): - relation.add_prefix("x", exclude=["a"], include=["b"]) - - -def test_relation_aggregate_method(): - """Test for Relation.aggregate().""" - db = pt.duckdb.Database() - relation = db.to_relation( - pl.DataFrame( - { - "a": [1, 1, 2], - "b": [10, 100, 1000], - "c": [1, 2, 1], - } - ) - ) - aggregated_relation = relation.aggregate( - "a", - b_sum="sum(b)", - group_by="a", - ) - assert tuple(aggregated_relation) == ( - {"a": 1, "b_sum": 110}, - {"a": 2, "b_sum": 1000}, - ) - - aggregated_relation_with_multiple_group_by = relation.aggregate( - "a", - "c", - b_sum="sum(b)", - group_by=["a", "c"], - ) - assert tuple(aggregated_relation_with_multiple_group_by) == ( - {"a": 1, "c": 1, "b_sum": 10}, - {"a": 1, "c": 2, "b_sum": 100}, - {"a": 2, "c": 1, "b_sum": 1000}, - ) - - -def test_relation_all_method(): - """Test for Relation.all().""" - db = pt.duckdb.Database() - relation = db.to_relation( - pl.DataFrame( - { - "a": [1, 2, 3], - "b": [100, 100, 100], - } - ) - ) - - assert not relation.all(a=100) - assert relation.all(b=100) - assert relation.all("a < 4", b=100) - - -def test_relation_case_method(): - db = pt.duckdb.Database() - - df = pl.DataFrame( - { - "shelf_classification": ["A", "B", "A", "C", "D"], - "weight": [1, 2, 3, 4, 5], - } - ) - - correct_df = df.with_columns( - pl.Series([10, 20, 10, 0, None], dtype=pl.Int32).alias("max_weight") - ) - correct_mapped_actions = db.to_relation(correct_df) - - mapped_actions = db.to_relation(df).case( - from_column="shelf_classification", - to_column="max_weight", - mapping={"A": 10, "B": 20, "D": None}, - default=0, - ) - assert mapped_actions == correct_mapped_actions - - # We can also use the Case class - case_statement = 
pt.sql.Case( - on_column="shelf_classification", - mapping={"A": 10, "B": 20, "D": None}, - default=0, - ) - alt_mapped_actions = db.to_relation(df).select(f"*, {case_statement} as max_weight") - assert alt_mapped_actions == correct_mapped_actions - - -def test_relation_coalesce_method(): - """Test for Relation.coalesce().""" - db = pt.duckdb.Database() - df = pl.DataFrame( - {"column_1": [1.0, None], "column_2": [None, "2"], "column_3": [3.0, None]} - ) - relation = db.to_relation(df) - coalesce_result = relation.coalesce(column_1=10, column_2="20").to_df() - correct_coalesce_result = pl.DataFrame( - { - "column_1": [1.0, 10.0], - "column_2": ["20", "2"], - "column_3": [3.0, None], - } - ) - assert coalesce_result.frame_equal(correct_coalesce_result) - - -def test_relation_union_method(): - """Test for Relation.union and Relation.__add__.""" - db = pt.duckdb.Database() - left = db.to_relation("select 1 as a, 2 as b") - right = db.to_relation("select 200 as b, 100 as a") - correct_union = pl.DataFrame( - { - "a": [1, 100], - "b": [2, 200], - } - ) - assert left + right == correct_union - assert right + left == correct_union[["b", "a"]][::-1] - - assert left.union(right) == correct_union - assert right.union(left) == correct_union[["b", "a"]][::-1] - - incompatible = db.to_relation("select 1 as a") - with pytest.raises( - TypeError, - match="Union between relations with different column names is not allowed.", - ): - incompatible + right # pyright: ignore - with pytest.raises( - TypeError, - match="Union between relations with different column names is not allowed.", - ): - left + incompatible # pyright: ignore - - -def test_relation_model_functionality(): - """The end-user should be able to specify the constructor for row values.""" - db = pt.duckdb.Database() - - # We have two rows in our relation - first_row_relation = db.to_relation("select 1 as a, 2 as b") - second_row_relation = db.to_relation("select 3 as a, 4 as b") - relation = first_row_relation + 
second_row_relation - - # Iterating over the relation should yield the same as .get() - iterator_value = tuple(relation)[0] - get_value = relation.get("a = 1") - assert iterator_value == get_value - assert iterator_value.a == 1 - assert get_value.a == 1 - assert iterator_value.b == 2 - assert get_value.b == 2 - - # The end-user should be able to specify a custom row constructor - model_mock = MagicMock(return_value="mock_return") - new_relation = relation.set_model(model_mock) - assert new_relation.get("a = 1") == "mock_return" - model_mock.assert_called_with(a=1, b=2) - - # We create a custom model - class MyModel(pt.Model): - a: int - b: str - - # Some dummy data - dummy_df = MyModel.examples({"a": [1, 2], "b": ["one", "two"]}) - dummy_relation = db.to_relation(dummy_df) - - # Initially the relation has no custom model and it is dynamically constructed - assert dummy_relation.model is None - assert not isinstance( - dummy_relation.limit(1).get(), - MyModel, - ) - - # MyRow can be specified as the deserialization class with Relation.set_model() - assert isinstance( - dummy_relation.set_model(MyModel).limit(1).get(), - MyModel, - ) - - # A custom relation class which specifies this as the default model - class MyRelation(pt.duckdb.Relation): - model = MyModel - - assert isinstance( - MyRelation(dummy_relation._relation, database=db).limit(1).get(), - MyModel, - ) - - # But the model is "lost" when we use schema-changing methods - assert not isinstance( - dummy_relation.set_model(MyModel).limit(1).select("a").get(), - MyModel, - ) - - -def test_row_sql_type_functionality(): - """Tests for mapping pydantic types to DuckDB SQL types.""" - - # Two nullable and two non-nullable columns - class OptionalRow(pt.Model): - a: str - b: float - c: Optional[str] - d: Optional[bool] - - assert OptionalRow.non_nullable_columns == {"a", "b"} - assert OptionalRow.nullable_columns == {"c", "d"} - - # All different types of SQL types - class TypeModel(pt.Model): - a: str - b: int - 
c: float - d: Optional[bool] - - assert TypeModel.sql_types == { - "a": "VARCHAR", - "b": "INTEGER", - "c": "DOUBLE", - "d": "BOOLEAN", - } - - -def test_fill_missing_columns(): - """Tests for Relation.with_missing_{nullable,defaultable}_columns.""" - - class MyRow(pt.Model): - # This can't be filled - a: str - # This can be filled with default value - b: Optional[str] = "default_value" - # This can be filled with null - c: Optional[str] - # This can be filled with null, but will be set - d: Optional[float] - # This can befilled with null, but with a different type - e: Optional[bool] - - # We check if defaults are easily retrievable from the model - assert MyRow.defaults == {"b": "default_value"} - - db = pt.duckdb.Database() - df = pl.DataFrame({"a": ["mandatory"], "d": [10.5]}) - relation = db.to_relation(df).set_model(MyRow) - - # Missing nullable columns b, c, and e are filled in with nulls - filled_nullables = relation.with_missing_nullable_columns() - assert filled_nullables.set_model(None).get() == { - "a": "mandatory", - "b": None, - "c": None, - "d": 10.5, - "e": None, - } - # And these nulls are properly typed - assert filled_nullables.types == { - "a": "VARCHAR", - "b": "VARCHAR", - "c": "VARCHAR", - "d": "DOUBLE", - "e": "BOOLEAN", - } - - # Now we fill in the b column with "default_value" - filled_defaults = relation.with_missing_defaultable_columns() - assert filled_defaults.set_model(None).get().dict() == { - "a": "mandatory", - "b": "default_value", - "d": 10.5, - } - assert filled_defaults.types == { - "a": "VARCHAR", - "b": "VARCHAR", - "d": "DOUBLE", - } - - # We now exclude the b column from being filled with default values - excluded_default = relation.with_missing_defaultable_columns(exclude=["b"]) - assert excluded_default.set_model(None).get().dict() == { - "a": "mandatory", - "d": 10.5, - } - - # We can also specify that we only want to fill a subset - included_defualts = relation.with_missing_defaultable_columns(include=["b"]) - assert 
included_defualts.set_model(None).get().dict() == { - "a": "mandatory", - "b": "default_value", - "d": 10.5, - } - - # We now exclude column b and c from being filled with null values - excluded_nulls = relation.with_missing_nullable_columns(exclude=["b", "c"]) - assert excluded_nulls.set_model(None).get().dict() == { - "a": "mandatory", - "d": 10.5, - "e": None, - } - - # Only specify that we want to fill column e with nulls - included_nulls = relation.with_missing_nullable_columns(include=["e"]) - assert included_nulls.set_model(None).get().dict() == { - "a": "mandatory", - "d": 10.5, - "e": None, - } - - # We should raise if both include and exclude is specified - with pytest.raises( - TypeError, match="Both include and exclude provided at the same time!" - ): - relation.with_missing_nullable_columns(include={"x"}, exclude={"y"}) - - with pytest.raises( - TypeError, match="Both include and exclude provided at the same time!" - ): - relation.with_missing_defaultable_columns(include={"x"}, exclude={"y"}) - - -def test_with_missing_nullable_enum_columns(): - """It should produce enums with null values correctly.""" - - class EnumModel(pt.Model): - enum_column: Optional[Literal["a", "b", "c"]] - other_column: int - - db = pt.duckdb.Database() - - # We insert data into a properly typed table in order to get the correct enum type - db.create_table(name="enum_table", model=EnumModel) - db.to_relation("select 'a' as enum_column, 1 as other_column").insert_into( - table="enum_table" - ) - table_relation = db.table("enum_table") - assert str(table_relation.types["enum_column"]).startswith("enum__") - - # We generate another dynamic relation where we expect the correct enum type - null_relation = ( - db.to_relation("select 2 as other_column") - .set_model(EnumModel) - .with_missing_nullable_columns() - ) - assert null_relation.types["enum_column"] == table_relation.types["enum_column"] - - # These two relations should now be unionable - union_relation = (null_relation + 
table_relation).order("other_column asc") - assert union_relation.types["enum_column"] == table_relation.types["enum_column"] - - with pl.StringCache(): - correct_union_df = pl.DataFrame( - { - "other_column": [1, 2], - "enum_column": pl.Series(["a", None]).cast(pl.Categorical), - } - ) - assert union_relation.to_df().frame_equal(correct_union_df) - - -def test_with_missing_nullable_enum_columns_without_table(): - """It should produce enums with null values correctly without a table.""" - - class EnumModel(pt.Model): - enum_column_1: Optional[Literal["a", "b", "c"]] - enum_column_2: Optional[Literal["a", "b", "c"]] - other_column: int - - # We should be able to create the correct type without a table - db = pt.duckdb.Database() - relation = db.to_relation("select 1 as other_column") - with pytest.raises( - TypeError, match=r".*You should invoke Relation.set_model\(\) first!" - ): - relation.with_missing_nullable_columns() - - model_relation = relation.set_model(EnumModel).with_missing_nullable_columns() - assert str(model_relation.types["enum_column_1"]).startswith("enum__") - assert ( - model_relation.types["enum_column_2"] == model_relation.types["enum_column_1"] - ) - - # And now we should be able to insert it into a new table - model_relation.create_table(name="enum_table") - table_relation = db.table("enum_table") - assert ( - table_relation.types["enum_column_1"] == model_relation.types["enum_column_1"] - ) - assert ( - table_relation.types["enum_column_2"] == model_relation.types["enum_column_1"] - ) - - -def test_with_missing_defualtable_enum_columns(): - """It should produce enums with default values correctly typed.""" - - class EnumModel(pt.Model): - enum_column: Optional[Literal["a", "b", "c"]] = "a" - other_column: int - - db = pt.duckdb.Database() - relation = db.to_relation("select 1 as other_column") - with pytest.raises( - TypeError, - match=r".*You should invoke Relation.set_model\(\) first!", - ): - relation.with_missing_defaultable_columns() - - 
model_relation = relation.set_model(EnumModel).with_missing_defaultable_columns() - assert str(model_relation.types["enum_column"]).startswith("enum__") - - -def test_relation_insert_into(): - """Relation.insert_into() should automatically order columnns correctly.""" - db = pt.duckdb.Database() - db.execute( - """ - create table foo ( - a integer, - b integer - ) - """ - ) - db.to_relation("select 2 as b, 1 as a").insert_into(table="foo") - row = db.table("foo").get() - assert row.a == 1 - assert row.b == 2 - - with pytest.raises( - TypeError, - match=re.escape( - "Relation is missing column(s) {'a'} " - "in order to be inserted into table 'foo'!" - ), - ): - db.to_relation("select 2 as b, 1 as c").insert_into(table="foo") - - -def test_polars_support(): - # Test converting a polars DataFrame to patito relation - df = pl.DataFrame(data={"column_1": ["a", "b", None], "column_2": [1, 2, None]}) - correct_dtypes = [pl.Utf8, pl.Int64] - assert df.dtypes == correct_dtypes - db = pt.duckdb.Database() - relation = db.to_relation(df) - assert relation.get(column_1="a").column_2 == 1 - - # Test converting back again the other way - roundtrip_df = relation.to_df() - assert roundtrip_df.frame_equal(df) - assert roundtrip_df.dtypes == correct_dtypes - - # Assert that .to_df() always returns a DataFrame. 
- assert isinstance(relation["column_1"].to_df(), pl.DataFrame) - - # Assert that .to_df() returns an empty DataFrame when the table has no rows - empty_dataframe = relation.filter(column_1="missing-column").to_df() - # assert empty_dataframe == pl.DataFrame(columns=["column_1", "column_2"]) - # assert empty_dataframe.frame_equal(pl.DataFrame(columns=["column_1", "column_2"])) - - # The datatype should be preserved - assert empty_dataframe.dtypes == correct_dtypes - - # A model should be able to be instantiated with a polars row - class MyModel(pt.Model): - a: int - b: str - - my_model_df = pl.DataFrame({"a": [1, 2], "b": ["x", "y"]}) - with pytest.raises( - ValueError, - match=r"MyModel._from_polars\(\) can only be invoked with exactly 1 row.*", - ): - MyModel.from_row(my_model_df) - - my_model = MyModel.from_row(my_model_df.head(1)) - assert my_model.a == 1 - assert my_model.b == "x" - - # Anything besides a polars dataframe should raise TypeError - with pytest.raises(TypeError): - MyModel.from_row(None) # pyright: ignore - - # But we can also skip validation if we want - unvalidated_model = MyModel.from_row( - pl.DataFrame().with_columns( - [ - pl.lit("string").alias("a"), - pl.lit(2).alias("b"), - ] - ), - validate=False, - ) - assert unvalidated_model.a == "string" - assert unvalidated_model.b == 2 - - -def test_series_vs_dataframe_behavior(): - """Test Relation.to_series().""" - db = pt.duckdb.Database() - relation = db.to_relation("select 1 as column_1, 2 as column_2") - - # Selecting multiple columns should yield a DataFrame - assert isinstance(relation[["column_1", "column_2"]].to_df(), pl.DataFrame) - - # Selecting a single column, but as an item in a list, should yield a DataFrame - assert isinstance(relation[["column_1"]].to_df(), pl.DataFrame) - - # Selecting a single column as a string should also yield a DataFrame - assert isinstance(relation["column_1"].to_df(), pl.DataFrame) - - # But .to_series() should yield a series - series = 
relation["column_1"].to_series() - assert isinstance(series, pl.Series) - - # The name should also be set correctly - assert series.name == "column_1" - - # And the content should be correct - correct_series = pl.Series([1], dtype=pl.Int32).alias("column_1") - assert series.series_equal(correct_series) - - # To series will raise a type error if invoked with anything other than 1 column - with pytest.raises(TypeError, match=r".*2 columns, while exactly 1 is required.*"): - relation.to_series() - - -def test_converting_enum_column_to_polars(): - """Enum types should be convertible to polars categoricals.""" - - class EnumModel(pt.Model): - enum_column: Literal["a", "b", "c"] - - db = pt.duckdb.Database() - db.create_table(name="enum_table", model=EnumModel) - db.execute( - """ - insert into enum_table - (enum_column) - values - ('a'), - ('a'), - ('b'); - """ - ) - enum_df = db.table("enum_table").to_df() - assert enum_df.frame_equal(pl.DataFrame({"enum_column": ["a", "a", "b"]})) - assert enum_df.dtypes == [pl.Categorical] - - -def test_non_string_enum(): - """It should handle other types than just string enums.""" - - class EnumModel(pt.Model): - enum_column: Literal[10, 11, 12] - - db = pt.duckdb.Database() - db.create_table(name="enum_table", model=EnumModel) - - db.execute( - """ - insert into enum_table - (enum_column) - values - (10), - (11), - (12); - """ - ) - enum_df = db.table("enum_table").to_df() - assert enum_df.frame_equal(pl.DataFrame({"enum_column": [10, 11, 12]})) - assert enum_df.dtypes == [pl.Int64] - - -def test_multiple_filters(): - """The filter method should AND multiple filters properly.""" - db = pt.duckdb.Database() - relation = db.to_relation("select 1 as a, 2 as b") - # The logical or should not make the filter valid for our row - assert relation.filter("(1 = 2) or b = 2", a=0).count() == 0 - assert relation.filter("a=0", "(1 = 2) or b = 2").count() == 0 - - -def test_no_filter(): - """No filters should return all rows.""" - db = 
pt.duckdb.Database() - relation = db.to_relation("select 1 as a, 2 as b") - # The logical or should not make the filter valid for our row - assert relation.filter().count() - - -def test_string_representation_of_relation(): - """It should have a string representation.""" - relation = pt.duckdb.Relation("select 1 as my_column") - relation_str = str(relation) - assert "my_column" in relation_str - - -def test_cast(): - """It should be able to cast to the correct SQL types based on model.""" - - class Schema(pt.Model): - float_column: float - - relation = pt.duckdb.Relation("select 1 as float_column, 2 as other_column") - with pytest.raises( - TypeError, - match=( - r"Relation\.cast\(\) invoked without Relation.model having been set\! " - r"You should invoke Relation\.set_model\(\) first or explicitly provide " - r"a model to \.cast\(\)." - ), - ): - relation.cast() - - # Originally the type of both columns are integers - modeled_relation = relation.set_model(Schema) - assert modeled_relation.types["float_column"] == "INTEGER" - assert modeled_relation.types["other_column"] == "INTEGER" - - # The casted variant has converted the float column to double - casted_relation = relation.set_model(Schema).cast() - assert casted_relation.types["float_column"] == "DOUBLE" - # But kept the other as-is - assert casted_relation.types["other_column"] == "INTEGER" - - # You can either set the model with .set_model() or provide it to cast - assert ( - relation.set_model(Schema) - .cast() - .to_df() - .frame_equal(relation.cast(Schema).to_df()) - ) - - # Other types that should be considered compatible should be kept as-is - compatible_relation = pt.duckdb.Relation("select 1::FLOAT as float_column") - assert compatible_relation.cast(Schema).types["float_column"] == "FLOAT" - - # Unless the strict parameter is specified - assert ( - compatible_relation.cast(Schema, strict=True).types["float_column"] == "DOUBLE" - ) - - # We can also specify a specific SQL type - class 
SpecificSQLTypeSchema(pt.Model): - float_column: float = pt.Field(sql_type="BIGINT") - - specific_cast_relation = relation.set_model(SpecificSQLTypeSchema).cast() - assert specific_cast_relation.types["float_column"] == "BIGINT" - - # Unknown types raise - class ObjectModel(pt.Model): - object_column: object - - with pytest.raises( - NotImplementedError, - match=r"No valid sql_type mapping found for column 'object_column'\.", - ): - pt.duckdb.Relation("select 1 as object_column").set_model(ObjectModel).cast() - - # Check for more specific type annotations - class TotalModel(pt.Model): - timedelta_column: timedelta - date_column: date - null_column: None - - df = pt.DataFrame( - { - "date_column": [date(2022, 9, 4)], - "null_column": [None], - } - ) - casted_relation = pt.duckdb.Relation(df, model=TotalModel).cast() - assert casted_relation.types == { - "date_column": "DATE", - "null_column": "INTEGER", - } - assert casted_relation.to_df().frame_equal(df) - - # It is possible to only cast a subset - class MyModel(pt.Model): - column_1: float - column_2: float - - relation = pt.duckdb.Relation("select 1 as column_1, 2 as column_2").set_model( - MyModel - ) - assert relation.cast(include=[]).types == { - "column_1": "INTEGER", - "column_2": "INTEGER", - } - assert relation.cast(include=["column_1"]).types == { - "column_1": "DOUBLE", - "column_2": "INTEGER", - } - assert relation.cast(include=["column_1", "column_2"]).types == { - "column_1": "DOUBLE", - "column_2": "DOUBLE", - } - - assert relation.cast(exclude=[]).types == { - "column_1": "DOUBLE", - "column_2": "DOUBLE", - } - assert relation.cast(exclude=["column_1"]).types == { - "column_1": "INTEGER", - "column_2": "DOUBLE", - } - assert relation.cast(exclude=["column_1", "column_2"]).types == { - "column_1": "INTEGER", - "column_2": "INTEGER", - } - - # Providing both include and exclude should raise a value error - with pytest.raises( - ValueError, - match=r"Both include and exclude provided to 
Relation.cast\(\)\!", - ): - relation.cast(include=["column_1"], exclude=["column_2"]) - - -@pytest.mark.xfail(strict=True) -def test_casting_timedelta_column_back_and_forth(): - class TotalModel(pt.Model): - timedelta_column: timedelta - date_column: date - null_column: None - - df = pt.DataFrame( - { - "timedelta_column": [timedelta(seconds=90)], - "date_column": [date(2022, 9, 4)], - "null_column": [None], - } - ) - casted_relation = pt.duckdb.Relation(df, model=TotalModel).cast() - assert casted_relation.types == { - "timedelta_column": "INTERVAL", - "date_column": "DATE", - "null_column": "INTEGER", - } - assert casted_relation.to_df().frame_equal(df) diff --git a/tests/test_model.py b/tests/test_model.py index 3c609bd..22e7ead 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -3,7 +3,7 @@ import enum import re from datetime import date, datetime, timedelta -from typing import List, Literal, Optional, Type +from typing import Literal, Optional, Type import patito as pt import polars as pl From 155ce1f59af096a358c6df7d89913b89a0356109 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Wed, 7 Feb 2024 12:02:00 -0500 Subject: [PATCH 22/29] chore: more sql cleanup --- pyproject.toml | 2 - src/patito/pydantic.py | 169 +---------------------------------------- src/patito/sql.py | 88 --------------------- 3 files changed, 1 insertion(+), 258 deletions(-) delete mode 100644 src/patito/sql.py diff --git a/pyproject.toml b/pyproject.toml index 4465839..814279f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,6 @@ polars = ">=0.20.0" # Required for typing.Literal in python3.7 typing-extensions = "*" pandas = {version = "*", optional = true, python = "^3.8"} -duckdb = {version = ">=0.6.0", optional = true, python = "^3.8"} pyarrow = {version = ">=5.0.0", optional = true, python = "^3.8"} # Optional docs dependencies Sphinx = {version = "<7", optional = true} @@ -30,7 +29,6 @@ sphinxcontrib-mermaid = {version = "*", optional = true} 
[tool.poetry.extras] # The pyarrow.parquet module is required for writing parquet caches to disk caching = ["pyarrow"] -duckdb = ["duckdb", "pyarrow"] pandas = ["pandas"] docs = [ "Sphinx", diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index 2da71d4..e511f5f 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -54,7 +54,6 @@ if TYPE_CHECKING: import patito.polars - from patito.duckdb import DuckDBSQLType # The generic type of a single row in given Relation. # Should be a typed subclass of Model. @@ -242,173 +241,7 @@ def valid_dtypes( # type: ignore else frozenset({cls.column_infos[column].dtype}) for column in cls.columns } # pyright: ignore - - @property # TODO deprecate? - def valid_sql_types( # type: ignore # noqa: C901 - cls: Type[ModelType], # pyright: ignore - ) -> dict[str, List["DuckDBSQLType"]]: - """ - Return a list of DuckDB SQL types which Patito considers valid for each field. - - The first item of each list is the default dtype chosen by Patito. - - Returns: - A dictionary mapping each column string name to a list of DuckDB SQL types - represented as strings. - - Raises: - NotImplementedError: If one or more model fields are annotated with types - not compatible with DuckDB. - - Example: - >>> import patito as pt - >>> from pprint import pprint - - >>> class MyModel(pt.Model): - ... bool_column: bool - ... str_column: str - ... int_column: int - ... float_column: float - ... 
- >>> pprint(MyModel.valid_sql_types) - {'bool_column': ['BOOLEAN', 'BOOL', 'LOGICAL'], - 'float_column': ['DOUBLE', - 'FLOAT8', - 'NUMERIC', - 'DECIMAL', - 'REAL', - 'FLOAT4', - 'FLOAT'], - 'int_column': ['INTEGER', - 'INT4', - 'INT', - 'SIGNED', - 'BIGINT', - 'INT8', - 'LONG', - 'HUGEINT', - 'SMALLINT', - 'INT2', - 'SHORT', - 'TINYINT', - 'INT1', - 'UBIGINT', - 'UINTEGER', - 'USMALLINT', - 'UTINYINT'], - 'str_column': ['VARCHAR', 'CHAR', 'BPCHAR', 'TEXT', 'STRING']} - """ - valid_dtypes: Dict[str, List["DuckDBSQLType"]] = {} - for column, props in cls._schema_properties().items(): - if "sql_type" in props: - valid_dtypes[column] = [ - props["sql_type"], - ] - elif "enum" in props and props["type"] == "string": - from patito.duckdb import _enum_type_name - - # fmt: off - valid_dtypes[column] = [ # pyright: ignore - _enum_type_name(field_properties=props), # type: ignore - "VARCHAR", "CHAR", "BPCHAR", "TEXT", "STRING", - ] - # fmt: on - elif "type" not in props: - raise NotImplementedError( - f"No valid sql_type mapping found for column '{column}'." 
- ) - elif props["type"] == "integer": - # fmt: off - valid_dtypes[column] = [ - "INTEGER", "INT4", "INT", "SIGNED", - "BIGINT", "INT8", "LONG", - "HUGEINT", - "SMALLINT", "INT2", "SHORT", - "TINYINT", "INT1", - "UBIGINT", - "UINTEGER", - "USMALLINT", - "UTINYINT", - ] - # fmt: on - elif props["type"] == "number": - if props.get("format") == "time-delta": - valid_dtypes[column] = [ - "INTERVAL", - ] - else: - # fmt: off - valid_dtypes[column] = [ - "DOUBLE", "FLOAT8", "NUMERIC", "DECIMAL", - "REAL", "FLOAT4", "FLOAT", - ] - # fmt: on - elif props["type"] == "boolean": - # fmt: off - valid_dtypes[column] = [ - "BOOLEAN", "BOOL", "LOGICAL", - ] - # fmt: on - elif props["type"] == "string": - string_format = props.get("format") - if string_format is None: - # fmt: off - valid_dtypes[column] = [ - "VARCHAR", "CHAR", "BPCHAR", "TEXT", "STRING", - ] - # fmt: on - elif string_format == "date": - valid_dtypes[column] = ["DATE"] - # TODO: Find out why this branch is not being hit - elif string_format == "date-time": # pragma: no cover - # fmt: off - valid_dtypes[column] = [ - "TIMESTAMP", "DATETIME", - "TIMESTAMP WITH TIMEZONE", "TIMESTAMPTZ", - ] - # fmt: on - elif props["type"] == "null": - valid_dtypes[column] = [ - "INTEGER", - ] - else: # pragma: no cover - raise NotImplementedError( - f"No valid sql_type mapping found for column '{column}'" - ) - - return valid_dtypes - - @property - def sql_types( # type: ignore - cls: Type[ModelType], # pyright: ignore - ) -> dict[str, str]: - """ - Return compatible DuckDB SQL types for all model fields. - - Returns: - Dictionary with column name keys and SQL type identifier strings. - - Example: - >>> from typing import Literal - >>> import patito as pt - - >>> class MyModel(pt.Model): - ... int_column: int - ... str_column: str - ... float_column: float - ... literal_column: Literal["a", "b", "c"] - ... 
- >>> MyModel.sql_types - {'int_column': 'INTEGER', - 'str_column': 'VARCHAR', - 'float_column': 'DOUBLE', - 'literal_column': 'enum__4a496993dde04060df4e15a340651b45'} - """ - return { - column: valid_types[0] - for column, valid_types in cls.valid_sql_types.items() - } - + @property def defaults( # type: ignore cls: Type[ModelType], # pyright: ignore diff --git a/src/patito/sql.py b/src/patito/sql.py deleted file mode 100644 index a52001a..0000000 --- a/src/patito/sql.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Module containing SQL generation utilities.""" -from typing import Dict, Optional, Union - -from typing_extensions import TypeAlias - -SQLLiteral: TypeAlias = Union[str, float, int, None] - - -def sql_repr(value: SQLLiteral) -> str: - """ - Convert python value to equivalent SQL literal value representation. - - Args: - value: Python object which is convertible to an equivalent SQL value type. - - Returns: - A SQL literal representation of the given value as a string. - """ - return "null" if value is None else repr(value) - - -class Case: - """Class representing an SQL case statement.""" - - def __init__( - self, - on_column: str, - mapping: Dict[SQLLiteral, SQLLiteral], - default: SQLLiteral, - as_column: Optional[str] = None, - ) -> None: - """ - Map values of one column over to a new column. - - Args: - on_column: Name of column defining the domain of the mapping. - mapping: Dictionary defining the mapping. The dictionary keys represent the - input values, while the dictionary values represent the output values. - Items are inserted into the SQL case statement by their repr() string - value. None is converted to SQL NULL. - default: Default output value for inputs which have no provided mapping. - If set to None, SQL NULL will be inserted as the default value. - as_column: Name of column to insert the mapped values into. If not provided - the SQL string expression will not end with "AS ". 
- - Examples: - >>> import patito as pt - >>> db = pt.duckdb.Database() - >>> relation = db.to_relation("select 1 as a union select 2 as a") - >>> case_statement = pt.sql.Case( - ... on_column="a", - ... mapping={1: "one", 2: "two"}, - ... default="three", - ... as_column="b", - ... ) - >>> relation.select(f"*, {case_statement}").order(by="a").to_df() - shape: (2, 2) - ┌─────┬─────┐ - │ a ┆ b │ - │ --- ┆ --- │ - │ i64 ┆ str │ - ╞═════╪═════╡ - │ 1 ┆ one │ - │ 2 ┆ two │ - └─────┴─────┘ - """ - self.on_column = on_column - self.as_column = as_column - self.mapping = { - sql_repr(key): sql_repr(value) for key, value in mapping.items() - } - self.default_value = sql_repr(default) - self.sql_string = f"case {self.on_column} " + ( - " ".join(f"when {key} then {value}" for key, value in self.mapping.items()) - + f" else {self.default_value} end" - ) - if self.as_column: - self.sql_string += f" as {as_column}" - - def __str__(self) -> str: - """ - Return string representation of SQL case statement. - - Returns: - String representing the case expression which can be directly inserted into - an SQL query. 
- """ - return self.sql_string From 6221dd7c512bbb1f41e4f6ae0873a355e4228be3 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Thu, 8 Feb 2024 12:04:16 -0500 Subject: [PATCH 23/29] chore: more migration/refactor - more db cleanup - column info -> _pydantic internals - DTypeResolver - cached schema helpers -> _pydantic internals - (polars) derive, cast -> LDF - validate_schema as classmethod - time/timedelta -> example generators --- .gitignore | 3 +- poetry.lock | 1047 +++++++++++------------ pyproject.toml | 5 +- src/patito/__init__.py | 21 +- src/patito/_pydantic/__init__.py | 1 + src/patito/_pydantic/column_info.py | 75 ++ src/patito/_pydantic/dtypes.py | 356 -------- src/patito/_pydantic/dtypes/__init__.py | 23 + src/patito/_pydantic/dtypes/dtypes.py | 248 ++++++ src/patito/_pydantic/dtypes/utils.py | 202 +++++ src/patito/_pydantic/repr.py | 3 +- src/patito/_pydantic/schema.py | 96 +++ src/patito/exceptions.py | 2 +- src/patito/polars.py | 117 ++- src/patito/pydantic.py | 300 +++---- src/patito/validators.py | 77 +- tests/examples.py | 54 ++ tests/test_dtypes.py | 185 ++-- tests/test_dummy_data.py | 18 +- tests/test_model.py | 98 ++- tests/test_validators.py | 10 +- 21 files changed, 1642 insertions(+), 1299 deletions(-) create mode 100644 src/patito/_pydantic/column_info.py delete mode 100644 src/patito/_pydantic/dtypes.py create mode 100644 src/patito/_pydantic/dtypes/__init__.py create mode 100644 src/patito/_pydantic/dtypes/dtypes.py create mode 100644 src/patito/_pydantic/dtypes/utils.py create mode 100644 src/patito/_pydantic/schema.py create mode 100644 tests/examples.py diff --git a/.gitignore b/.gitignore index 138ab5c..c051d58 100644 --- a/.gitignore +++ b/.gitignore @@ -133,4 +133,5 @@ dmypy.json # Pyre type checker .pyre/ -.vscode/ \ No newline at end of file +.vscode/ +.DS_Store \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 9c906ab..341460c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -48,13 +48,13 @@ limiter = 
["cachecontrol[filecache] (>=0.12.6)", "lockfile (>=0.12.2)"] [[package]] name = "apeye-core" -version = "1.1.4" +version = "1.1.5" description = "Core (offline) functionality for the apeye library." optional = true python-versions = ">=3.6.1" files = [ - {file = "apeye_core-1.1.4-py3-none-any.whl", hash = "sha256:084bc696448d3ac428fece41c1f2eb08fa9d9ce1d1b2f4d43187e3def4528a60"}, - {file = "apeye_core-1.1.4.tar.gz", hash = "sha256:72bb89fed3baa647cb81aa28e1d851787edcbf9573853b5d2b5f87c02f50eaf5"}, + {file = "apeye_core-1.1.5-py3-none-any.whl", hash = "sha256:dc27a93f8c9e246b3b238c5ea51edf6115ab2618ef029b9f2d9a190ec8228fbf"}, + {file = "apeye_core-1.1.5.tar.gz", hash = "sha256:5de72ed3d00cc9b20fea55e54b7ab8f5ef8500eb33a5368bc162a5585e238a55"}, ] [package.dependencies] @@ -63,31 +63,32 @@ idna = ">=2.5" [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = 
["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "autodocsumm" -version = "0.2.11" +version = "0.2.12" description = "Extended sphinx autodoc including automatic autosummaries" optional = true python-versions = ">=3.7" files = [ - {file = "autodocsumm-0.2.11-py3-none-any.whl", hash = "sha256:f1d0a623bf1ad64d979a9e23fd360d1fb1b8f869beaf3197f711552cddc174e2"}, - {file = "autodocsumm-0.2.11.tar.gz", hash = "sha256:183212bd9e9f3b58a96bb21b7958ee4e06224107aa45b2fd894b61b83581b9a9"}, + {file = "autodocsumm-0.2.12-py3-none-any.whl", hash = "sha256:b842b53c686c07a4f174721ca4e729b027367703dbf42e2508863a3c6d6c049c"}, + {file = "autodocsumm-0.2.12.tar.gz", hash = "sha256:848fe8c38df433c6635489499b969cb47cc389ed3d7b6e75c8ccbc94d4b3bf9e"}, ] [package.dependencies] @@ -112,74 +113,77 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bandit" -version = "1.7.6" +version = "1.7.7" description = "Security oriented static analyser for python code." 
optional = false python-versions = ">=3.8" files = [ - {file = "bandit-1.7.6-py3-none-any.whl", hash = "sha256:36da17c67fc87579a5d20c323c8d0b1643a890a2b93f00b3d1229966624694ff"}, - {file = "bandit-1.7.6.tar.gz", hash = "sha256:72ce7bc9741374d96fb2f1c9a8960829885f1243ffde743de70a19cee353e8f3"}, + {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"}, + {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"}, ] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=3.1.30" PyYAML = ">=5.3.1" rich = "*" stevedore = ">=1.20.0" [package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "tomli (>=1.1.0)"] +baseline = ["GitPython (>=3.1.30)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] toml = ["tomli (>=1.1.0)"] yaml = ["PyYAML"] [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" optional = true python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] 
+chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "black" -version = "23.12.0" +version = "24.1.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"}, - {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"}, - {file = "black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"}, - {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"}, - {file = "black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"}, - {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"}, - {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"}, - {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"}, - {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"}, - {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"}, - {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"}, - {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"}, - {file = "black-23.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcf91b01ddd91a2fed9a8006d7baa94ccefe7e518556470cf40213bd3d44bbbc"}, - {file = "black-23.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:996650a89fe5892714ea4ea87bc45e41a59a1e01675c42c433a35b490e5aa3f0"}, - {file = "black-23.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbff34c487239a63d86db0c9385b27cdd68b1bfa4e706aa74bb94a435403672"}, - {file = "black-23.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:97af22278043a6a1272daca10a6f4d36c04dfa77e61cbaaf4482e08f3640e9f0"}, - {file = "black-23.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ead25c273adfad1095a8ad32afdb8304933efba56e3c1d31b0fee4143a1e424a"}, - {file = "black-23.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c71048345bdbced456cddf1622832276d98a710196b842407840ae8055ade6ee"}, - {file = "black-23.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a832b6e00eef2c13b3239d514ea3b7d5cc3eaa03d0474eedcbbda59441ba5d"}, - {file = "black-23.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:6a82a711d13e61840fb11a6dfecc7287f2424f1ca34765e70c909a35ffa7fb95"}, - {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"}, - {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"}, + {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, + {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, + {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, + {file = "black-24.1.1-cp310-cp310-win_amd64.whl", 
hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, + {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, + {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, + {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, + {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, + {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, + {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, + {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, + {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, + {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, + {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, + {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, + {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, + {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, + {file = 
"black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, + {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, + {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, + {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, + {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, ] [package.dependencies] @@ -217,34 +221,34 @@ tomli = "*" [[package]] name = "cachecontrol" -version = "0.13.1" +version = "0.14.0" description = "httplib2 caching for requests" optional = true python-versions = ">=3.7" files = [ - {file = "cachecontrol-0.13.1-py3-none-any.whl", hash = "sha256:95dedbec849f46dda3137866dc28b9d133fc9af55f5b805ab1291833e4457aa4"}, - {file = "cachecontrol-0.13.1.tar.gz", hash = "sha256:f012366b79d2243a6118309ce73151bf52a38d4a5dac8ea57f09bd29087e506b"}, + {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, + {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, ] [package.dependencies] filelock = {version = ">=3.8.0", optional = true, markers = "extra == \"filecache\""} -msgpack = ">=0.5.2" +msgpack = ">=0.5.2,<2.0.0" requests = ">=2.16.0" [package.extras] -dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "mypy", "pytest", "pytest-cov", "sphinx", "tox", "types-redis", "types-requests"] +dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] filecache = ["filelock (>=3.8.0)"] redis = ["redis 
(>=2.10.5)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -388,63 +392,63 @@ files = [ [[package]] name = "coverage" -version = "7.3.3" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d874434e0cb7b90f7af2b6e3309b0733cde8ec1476eb47db148ed7deeb2a9494"}, - {file = "coverage-7.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6621dccce8af666b8c4651f9f43467bfbf409607c604b840b78f4ff3619aeb"}, - {file = "coverage-7.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1367aa411afb4431ab58fd7ee102adb2665894d047c490649e86219327183134"}, - {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f0f8f0c497eb9c9f18f21de0750c8d8b4b9c7000b43996a094290b59d0e7523"}, - {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db0338c4b0951d93d547e0ff8d8ea340fecf5885f5b00b23be5aa99549e14cfd"}, - {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d31650d313bd90d027f4be7663dfa2241079edd780b56ac416b56eebe0a21aab"}, - {file = 
"coverage-7.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9437a4074b43c177c92c96d051957592afd85ba00d3e92002c8ef45ee75df438"}, - {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17d9cb06c13b4f2ef570355fa45797d10f19ca71395910b249e3f77942a837"}, - {file = "coverage-7.3.3-cp310-cp310-win32.whl", hash = "sha256:eee5e741b43ea1b49d98ab6e40f7e299e97715af2488d1c77a90de4a663a86e2"}, - {file = "coverage-7.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:593efa42160c15c59ee9b66c5f27a453ed3968718e6e58431cdfb2d50d5ad284"}, - {file = "coverage-7.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c944cf1775235c0857829c275c777a2c3e33032e544bcef614036f337ac37bb"}, - {file = "coverage-7.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eda7f6e92358ac9e1717ce1f0377ed2b9320cea070906ece4e5c11d172a45a39"}, - {file = "coverage-7.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c854c1d2c7d3e47f7120b560d1a30c1ca221e207439608d27bc4d08fd4aeae8"}, - {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:222b038f08a7ebed1e4e78ccf3c09a1ca4ac3da16de983e66520973443b546bc"}, - {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff4800783d85bff132f2cc7d007426ec698cdce08c3062c8d501ad3f4ea3d16c"}, - {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fc200cec654311ca2c3f5ab3ce2220521b3d4732f68e1b1e79bef8fcfc1f2b97"}, - {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:307aecb65bb77cbfebf2eb6e12009e9034d050c6c69d8a5f3f737b329f4f15fb"}, - {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ffb0eacbadb705c0a6969b0adf468f126b064f3362411df95f6d4f31c40d31c1"}, - {file = "coverage-7.3.3-cp311-cp311-win32.whl", hash = 
"sha256:79c32f875fd7c0ed8d642b221cf81feba98183d2ff14d1f37a1bbce6b0347d9f"}, - {file = "coverage-7.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:243576944f7c1a1205e5cd658533a50eba662c74f9be4c050d51c69bd4532936"}, - {file = "coverage-7.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a2ac4245f18057dfec3b0074c4eb366953bca6787f1ec397c004c78176a23d56"}, - {file = "coverage-7.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9191be7af41f0b54324ded600e8ddbcabea23e1e8ba419d9a53b241dece821d"}, - {file = "coverage-7.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c0b1b8b5a4aebf8fcd227237fc4263aa7fa0ddcd4d288d42f50eff18b0bac4"}, - {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee453085279df1bac0996bc97004771a4a052b1f1e23f6101213e3796ff3cb85"}, - {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1191270b06ecd68b1d00897b2daddb98e1719f63750969614ceb3438228c088e"}, - {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:007a7e49831cfe387473e92e9ff07377f6121120669ddc39674e7244350a6a29"}, - {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:af75cf83c2d57717a8493ed2246d34b1f3398cb8a92b10fd7a1858cad8e78f59"}, - {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:811ca7373da32f1ccee2927dc27dc523462fd30674a80102f86c6753d6681bc6"}, - {file = "coverage-7.3.3-cp312-cp312-win32.whl", hash = "sha256:733537a182b5d62184f2a72796eb6901299898231a8e4f84c858c68684b25a70"}, - {file = "coverage-7.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:e995efb191f04b01ced307dbd7407ebf6e6dc209b528d75583277b10fd1800ee"}, - {file = "coverage-7.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbd8a5fe6c893de21a3c6835071ec116d79334fbdf641743332e442a3466f7ea"}, - {file = 
"coverage-7.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:50c472c1916540f8b2deef10cdc736cd2b3d1464d3945e4da0333862270dcb15"}, - {file = "coverage-7.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e9223a18f51d00d3ce239c39fc41410489ec7a248a84fab443fbb39c943616c"}, - {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f501e36ac428c1b334c41e196ff6bd550c0353c7314716e80055b1f0a32ba394"}, - {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:475de8213ed95a6b6283056d180b2442eee38d5948d735cd3d3b52b86dd65b92"}, - {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afdcc10c01d0db217fc0a64f58c7edd635b8f27787fea0a3054b856a6dff8717"}, - {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fff0b2f249ac642fd735f009b8363c2b46cf406d3caec00e4deeb79b5ff39b40"}, - {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a1f76cfc122c9e0f62dbe0460ec9cc7696fc9a0293931a33b8870f78cf83a327"}, - {file = "coverage-7.3.3-cp38-cp38-win32.whl", hash = "sha256:757453848c18d7ab5d5b5f1827293d580f156f1c2c8cef45bfc21f37d8681069"}, - {file = "coverage-7.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ad2453b852a1316c8a103c9c970db8fbc262f4f6b930aa6c606df9b2766eee06"}, - {file = "coverage-7.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b15e03b8ee6a908db48eccf4e4e42397f146ab1e91c6324da44197a45cb9132"}, - {file = "coverage-7.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:89400aa1752e09f666cc48708eaa171eef0ebe3d5f74044b614729231763ae69"}, - {file = "coverage-7.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c59a3e59fb95e6d72e71dc915e6d7fa568863fad0a80b33bc7b82d6e9f844973"}, - {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9ede881c7618f9cf93e2df0421ee127afdfd267d1b5d0c59bcea771cf160ea4a"}, - {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3bfd2c2f0e5384276e12b14882bf2c7621f97c35320c3e7132c156ce18436a1"}, - {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f3bad1a9313401ff2964e411ab7d57fb700a2d5478b727e13f156c8f89774a0"}, - {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:65d716b736f16e250435473c5ca01285d73c29f20097decdbb12571d5dfb2c94"}, - {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a702e66483b1fe602717020a0e90506e759c84a71dbc1616dd55d29d86a9b91f"}, - {file = "coverage-7.3.3-cp39-cp39-win32.whl", hash = "sha256:7fbf3f5756e7955174a31fb579307d69ffca91ad163467ed123858ce0f3fd4aa"}, - {file = "coverage-7.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cad9afc1644b979211989ec3ff7d82110b2ed52995c2f7263e7841c846a75348"}, - {file = "coverage-7.3.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:d299d379b676812e142fb57662a8d0d810b859421412b4d7af996154c00c31bb"}, - {file = "coverage-7.3.3.tar.gz", hash = "sha256:df04c64e58df96b4427db8d0559e95e2df3138c9916c96f9f6a4dd220db2fdb7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = 
"coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", 
hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = 
"coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = 
"coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -485,13 +489,13 @@ domdf-python-tools = ">=2.2.0" [[package]] name = "docutils" -version = "0.18.1" +version = "0.19" description = "Docutils -- Python Documentation Utilities" optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, - {file = 
"docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, ] [[package]] @@ -514,54 +518,6 @@ typing-extensions = ">=3.7.4.1" all = ["pytz (>=2019.1)"] dates = ["pytz (>=2019.1)"] -[[package]] -name = "duckdb" -version = "0.9.2" -description = "DuckDB embedded database" -optional = true -python-versions = ">=3.7.0" -files = [ - {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"}, - {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"}, - {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"}, - {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"}, - {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"}, - {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"}, - {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"}, - {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"}, - {file = 
"duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"}, - {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"}, - {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"}, - {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"}, - {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"}, - {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"}, - {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"}, - {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"}, - {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"}, - {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"}, - {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"}, - {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"}, - {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"}, - {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"}, - {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"}, - {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"}, - {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"}, - {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"}, - {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"}, - {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"}, - {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"}, - {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"}, - {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"}, -] - [[package]] name = "exceptiongroup" version = "1.2.0" @@ -709,37 +665,6 @@ files = [ [package.dependencies] flake8 = "*" -[[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.40" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, - {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] - [[package]] name = "html5lib" version = "1.1" @@ -785,13 +710,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.0" +version = "7.0.1" description = "Read metadata from Python packages" optional = true python-versions = ">=3.8" files = [ - {file = 
"importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, - {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, ] [package.dependencies] @@ -829,13 +754,13 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = true python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -885,61 +810,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = true python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = 
"MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = 
"MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = 
"sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -966,13 +901,13 @@ files = [ [[package]] name = "more-itertools" -version = "10.1.0" +version = "10.2.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.8" files = [ - {file = "more-itertools-10.1.0.tar.gz", hash = "sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a"}, - {file = "more_itertools-10.1.0-py3-none-any.whl", hash = "sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6"}, + {file = 
"more-itertools-10.2.0.tar.gz", hash = "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"}, + {file = "more_itertools-10.2.0-py3-none-any.whl", hash = "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684"}, ] [[package]] @@ -1042,38 +977,38 @@ files = [ [[package]] name = "mypy" -version = "1.7.1" +version = "1.8.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, - {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, - {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, - {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, - {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, - {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, - {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, - {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, - {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, - {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, - {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, - {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, - {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, - {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, - {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, - {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, - {file = 
"mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, - {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, - {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = 
"mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -1203,47 +1138,47 @@ files = [ [[package]] name = "numpy" -version = "1.26.2" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, - {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, - {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, - {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, - {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, - {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, - {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = 
"sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, - {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, - {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, - {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, - {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, - {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, - {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, - {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, - {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, - {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, - {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, + {file = 
"numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = 
"numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -1366,28 +1301,28 @@ files = [ [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1396,22 +1331,22 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "polars" -version = "0.20.0" +version = "0.20.7" description = "Blazingly fast DataFrame library" optional = false python-versions = ">=3.8" files = [ - {file = 
"polars-0.20.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:813707e28bdbdf00a849a72d856cad48a4063a1e7f4487bc7068f957988107ce"}, - {file = "polars-0.20.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:7504c63e9783de6cec289f89383b797a5e1fd72c10abc137bba7ee022d34f193"}, - {file = "polars-0.20.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e01beb3e3ad2c3efff209fafd5eff785489d1a6fb196aee2e29237d969389255"}, - {file = "polars-0.20.0-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:1d78313386fb46ab516eebb521c8612a9acb86c9972af195e771f468c0e5e77d"}, - {file = "polars-0.20.0-cp38-abi3-win_amd64.whl", hash = "sha256:8b8856c7368b605b77e9cf231340ab39a4af86a1dafddde92a2b913c0c47007e"}, - {file = "polars-0.20.0.tar.gz", hash = "sha256:cc70f7e4f195b438de54f51d411fa831d032d8528fc01fea186c8784f06d0f04"}, + {file = "polars-0.20.7-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:fd6100df0ca53614c3fa7136251e030fb70dee8833023edf7a3ac380f8e2dce5"}, + {file = "polars-0.20.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:359d556fafcb533bb0caa34ddfbd5161ee23b8a43817c7a2b80189720a1f42f6"}, + {file = "polars-0.20.7-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cfc71d4569818548c9bf4285c497d62a3a542363c86940593a41dd731e69d7f"}, + {file = "polars-0.20.7-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:ddff2fa419f15aa64ee23a94655fcb24b3e1b5c3eb30d124e3315deca2039a92"}, + {file = "polars-0.20.7-cp38-abi3-win_amd64.whl", hash = "sha256:f0928576a52eca47e14a8b98f4da22025b4b2fa32549f80f4d92c5187fd3f461"}, + {file = "polars-0.20.7.tar.gz", hash = "sha256:ec742fdf41e16ff699c043259ba94a11bbc2f7dcb978d768495db1ff2b3c5c20"}, ] [package.extras] adbc = ["adbc_driver_sqlite"] -all = ["polars[adbc,cloudpickle,connectorx,deltalake,fsspec,gevent,matplotlib,numpy,pandas,pyarrow,pydantic,pyiceberg,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] +all = 
["polars[adbc,cloudpickle,connectorx,deltalake,fsspec,gevent,numpy,pandas,plot,pyarrow,pydantic,pyiceberg,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] cloudpickle = ["cloudpickle"] connectorx = ["connectorx (>=0.3.2)"] deltalake = ["deltalake (>=0.14.0)"] @@ -1421,6 +1356,7 @@ matplotlib = ["matplotlib"] numpy = ["numpy (>=1.16.0)"] openpyxl = ["openpyxl (>=3.0.0)"] pandas = ["pandas", "pyarrow (>=7.0.0)"] +plot = ["hvplot (>=0.9.1)"] pyarrow = ["pyarrow (>=7.0.0)"] pydantic = ["pydantic"] pyiceberg = ["pyiceberg (>=0.5.0)"] @@ -1432,51 +1368,51 @@ xlsxwriter = ["xlsxwriter"] [[package]] name = "pyarrow" -version = "14.0.1" +version = "15.0.0" description = "Python library for Apache Arrow" optional = true python-versions = ">=3.8" files = [ - {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, - {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, - {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, - {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, - {file = 
"pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, - {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, - {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, - {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, - {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, - {file = 
"pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"}, - {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"}, - {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"}, - {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"}, - {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"}, - {file = 
"pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, - {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, + {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, + {file = 
"pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, + {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, + {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, + {file = 
"pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, + {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, + {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, + {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, ] [package.dependencies] -numpy = ">=1.16.6" +numpy = ">=1.16.6,<2" [[package]] name = "pycodestyle" @@ -1491,18 +1427,18 @@ files = [ [[package]] name = "pydantic" -version = "2.5.2" +version = "2.6.1" description = "Data validation 
using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, - {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.5" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -1510,116 +1446,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.5" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, - 
{file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, - {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, - {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, - {file = 
"pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, - {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, - {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, - {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, - {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, - {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, - {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, - {file = 
"pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, - {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, - {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, - {file = 
"pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, - {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, - {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, - {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, - {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, - {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = 
"sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = 
"sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = 
"pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -1653,13 +1563,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyright" -version = "1.1.341" +version = "1.1.350" description = "Command line wrapper for pyright" optional = false python-versions = ">=3.7" files = [ - {file = "pyright-1.1.341-py3-none-any.whl", hash = "sha256:f5800daf9d5780ebf6c6e04064a6d20da99c0ef16efd77526f83cc8d8551ff9f"}, - {file = "pyright-1.1.341.tar.gz", hash = "sha256:b891721f3abd10635cc4fd3076bcff5b7676567dc3a629997ed59a0d30034a87"}, + {file = "pyright-1.1.350-py3-none-any.whl", hash = "sha256:f1dde6bcefd3c90aedbe9dd1c573e4c1ddbca8c74bf4fa664dd3b1a599ac9a66"}, + {file = "pyright-1.1.350.tar.gz", hash = "sha256:a8ba676de3a3737ea4d8590604da548d4498cc5ee9ee00b1a403c6db987916c6"}, ] [package.dependencies] @@ 
-1671,13 +1581,13 @@ dev = ["twine (>=3.4.1)"] [[package]] name = "pytest" -version = "7.4.3" +version = "8.0.0" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, ] [package.dependencies] @@ -1685,7 +1595,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" +pluggy = ">=1.3.0,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] @@ -1711,13 +1621,13 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-watcher" -version = "0.3.4" +version = "0.4.1" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = "sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, - {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, + {file = "pytest_watcher-0.4.1-py3-none-any.whl", hash = "sha256:29435669cb0124fb32d6de649fe9b1350f6dac94176313fff559ee4c2a66fd6e"}, + {file = "pytest_watcher-0.4.1.tar.gz", hash = "sha256:5a793c4c883e3a55ab2abbfa3a8cd6fa6495b3767d5f6644052cc5f3236f511a"}, ] [package.dependencies] @@ -1740,13 +1650,13 @@ six = ">=1.5" [[package]] 
name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -1850,13 +1760,13 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruamel-yaml" -version = "0.18.5" +version = "0.18.6" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = true python-versions = ">=3.7" files = [ - {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, - {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, ] [package.dependencies] @@ -1911,15 +1821,41 @@ files = [ {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, ] +[[package]] +name = "ruff" +version = "0.2.1" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dd81b911d28925e7e8b323e8d06951554655021df8dd4ac3045d7212ac4ba080"}, + {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dc586724a95b7d980aa17f671e173df00f0a2eef23f8babbeee663229a938fec"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c92db7101ef5bfc18e96777ed7bc7c822d545fa5977e90a585accac43d22f18a"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13471684694d41ae0f1e8e3a7497e14cd57ccb7dd72ae08d56a159d6c9c3e30e"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a11567e20ea39d1f51aebd778685582d4c56ccb082c1161ffc10f79bebe6df35"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:00a818e2db63659570403e44383ab03c529c2b9678ba4ba6c105af7854008105"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be60592f9d218b52f03384d1325efa9d3b41e4c4d55ea022cd548547cc42cd2b"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd2288890b88e8aab4499e55148805b58ec711053588cc2f0196a44f6e3d855"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ef052283da7dec1987bba8d8733051c2325654641dfe5877a4022108098683"}, + {file = "ruff-0.2.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7022d66366d6fded4ba3889f73cd791c2d5621b2ccf34befc752cb0df70f5fad"}, + {file = "ruff-0.2.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0a725823cb2a3f08ee743a534cb6935727d9e47409e4ad72c10a3faf042ad5ba"}, + {file = "ruff-0.2.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0034d5b6323e6e8fe91b2a1e55b02d92d0b582d2953a2b37a67a2d7dedbb7acc"}, + {file = "ruff-0.2.1-py3-none-musllinux_1_2_x86_64.whl", hash 
= "sha256:e5cb5526d69bb9143c2e4d2a115d08ffca3d8e0fddc84925a7b54931c96f5c02"}, + {file = "ruff-0.2.1-py3-none-win32.whl", hash = "sha256:6b95ac9ce49b4fb390634d46d6ece32ace3acdd52814671ccaf20b7f60adb232"}, + {file = "ruff-0.2.1-py3-none-win_amd64.whl", hash = "sha256:e3affdcbc2afb6f5bd0eb3130139ceedc5e3f28d206fe49f63073cb9e65988e0"}, + {file = "ruff-0.2.1-py3-none-win_arm64.whl", hash = "sha256:efababa8e12330aa94a53e90a81eb6e2d55f348bc2e71adbf17d9cad23c03ee6"}, + {file = "ruff-0.2.1.tar.gz", hash = "sha256:3b42b5d8677cd0c72b99fcaf068ffc62abb5a19e71b4a3b9cfa50658a0af02f1"}, +] + [[package]] name = "setuptools" -version = "69.0.2" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, - {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] @@ -1938,17 +1874,6 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - [[package]] name = "snowballstemmer" version = "2.2.0" @@ -2094,17 +2019,17 @@ dev = 
["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] [[package]] name = "sphinx-tabs" -version = "3.4.4" +version = "3.4.5" description = "Tabbed views for Sphinx" optional = true python-versions = "~=3.7" files = [ - {file = "sphinx-tabs-3.4.4.tar.gz", hash = "sha256:f1b72c4f23d1ba9cdcaf880fd883524bc70689f561b9785719b8b3c3c5ed0aca"}, - {file = "sphinx_tabs-3.4.4-py3-none-any.whl", hash = "sha256:85939b689a0b0a24bf0da418b9acf14b0b0fca7a7a5cd35461ee452a2d4e716b"}, + {file = "sphinx-tabs-3.4.5.tar.gz", hash = "sha256:ba9d0c1e3e37aaadd4b5678449eb08176770e0fc227e769b6ce747df3ceea531"}, + {file = "sphinx_tabs-3.4.5-py3-none-any.whl", hash = "sha256:92cc9473e2ecf1828ca3f6617d0efc0aa8acb06b08c56ba29d1413f2f0f6cf09"}, ] [package.dependencies] -docutils = ">=0.18.0,<0.19.0" +docutils = "*" pygments = "*" sphinx = "*" @@ -2321,24 +2246,24 @@ files = [ [[package]] name = "types-pytz" -version = "2023.3.1.1" +version = "2024.1.0.20240203" description = "Typing stubs for pytz" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"}, - {file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"}, + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, ] [[package]] name = "types-setuptools" -version = "69.0.0.0" +version = "69.0.0.20240125" description = "Typing stubs for setuptools" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-setuptools-69.0.0.0.tar.gz", hash = "sha256:b0a06219f628c6527b2f8ce770a4f47550e00d3e8c3ad83e2dc31bc6e6eda95d"}, - {file = 
"types_setuptools-69.0.0.0-py3-none-any.whl", hash = "sha256:8c86195bae2ad81e6dea900a570fe9d64a59dbce2b11cc63c046b03246ea77bf"}, + {file = "types-setuptools-69.0.0.20240125.tar.gz", hash = "sha256:22ad498cb585b22ce8c97ada1fccdf294a2e0dd7dc984a28535a84ea82f45b3f"}, + {file = "types_setuptools-69.0.0.20240125-py3-none-any.whl", hash = "sha256:00835f959ff24ebc32c55da8df9d46e8df25e3c4bfacb43e98b61fde51a4bc41"}, ] [[package]] @@ -2354,65 +2279,68 @@ files = [ [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" description = "Provider of IANA time zone data" optional = true python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", 
hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = 
"watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = 
"watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = 
"sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -2431,13 +2359,13 @@ files = [ [[package]] name = "xdoctest" -version = "1.1.2" +version = "1.1.3" description = "A rewrite of the builtin doctest module" optional = false python-versions = ">=3.6" files = [ - {file = "xdoctest-1.1.2-py3-none-any.whl", hash = "sha256:ebe133222534f09597cbe461f97cc5f95ad7b36e5d31f3437caffb9baaddbddb"}, - {file = "xdoctest-1.1.2.tar.gz", hash = "sha256:267d3d4e362547fa917d3deabaf6888232bbf43c8d30298faeb957dbfa7e0ba3"}, + {file = "xdoctest-1.1.3-py3-none-any.whl", hash = "sha256:9360535bd1a971ffc216d9613898cedceb81d0fd024587cc3c03c74d14c00a31"}, + {file = "xdoctest-1.1.3.tar.gz", hash = "sha256:84e76a42a11a5926ff66d9d84c616bc101821099672550481ad96549cbdd02ae"}, ] [package.extras] @@ -2470,10 +2398,9 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [extras] caching = ["pyarrow"] docs = ["Sphinx", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx-rtd-theme", 
"sphinx-toolbox", "sphinxcontrib-mermaid"] -duckdb = ["duckdb", "pyarrow"] pandas = ["pandas"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "12b075a3eab1fa43746724a0a52aed015c79c18cc4c53c02002e1cef665e9b83" +content-hash = "8c20f55841298048f101fc705ade24e1b6b3cc5b2d22b52f8ab08ea8e3dfee3c" diff --git a/pyproject.toml b/pyproject.toml index 814279f..9b36ea7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ keywords = ["validation", "dataframe"] [tool.poetry.dependencies] python = "^3.8" pydantic = ">=2.0.0" -polars = ">=0.20.0" +polars = ">=0.20.1" # Required for typing.Literal in python3.7 typing-extensions = "*" pandas = {version = "*", optional = true, python = "^3.8"} @@ -40,7 +40,7 @@ docs = [ ] [tool.poetry.group.dev.dependencies] -black = ">=22.3.0" +ruff = ">=0.2.1" coverage = {version = "*", extras = ["toml"]} flake8 = "3.9.2" flake8-annotations = "*" @@ -48,7 +48,6 @@ flake8-bandit = "*" flake8-black = "*" flake8-bugbear = "*" flake8-isort = "*" -isort = "*" pyright = ">=1.1.239" pytest = ">=7.1.2" pytest-cov = ">=3.0.0" diff --git a/src/patito/__init__.py b/src/patito/__init__.py index e29245b..3e5dfe9 100644 --- a/src/patito/__init__.py +++ b/src/patito/__init__.py @@ -1,13 +1,12 @@ """Patito, a data-modelling library built on top of polars and pydantic.""" from polars import Expr, Series, col -from patito import exceptions, sql +from patito import exceptions from patito.exceptions import DataFrameValidationError from patito.polars import DataFrame, LazyFrame from patito.pydantic import Field, Model _CACHING_AVAILABLE = False -_DUCKDB_AVAILABLE = False field = col("_") __all__ = [ "DataFrame", @@ -18,30 +17,12 @@ "Model", "Series", "_CACHING_AVAILABLE", - "_DUCKDB_AVAILABLE", "col", "exceptions", "field", "sql", ] -try: - from patito import duckdb - - _DUCKDB_AVAILABLE = True - __all__ += ["duckdb"] -except ImportError: # pragma: no cover - pass - -try: - from patito.database import Database - - 
_CACHING_AVAILABLE = True - __all__ += ["Database"] -except ImportError: - pass - - try: from importlib.metadata import PackageNotFoundError, version except ImportError: # pragma: no cover diff --git a/src/patito/_pydantic/__init__.py b/src/patito/_pydantic/__init__.py index e69de29..8b13789 100644 --- a/src/patito/_pydantic/__init__.py +++ b/src/patito/_pydantic/__init__.py @@ -0,0 +1 @@ + diff --git a/src/patito/_pydantic/column_info.py b/src/patito/_pydantic/column_info.py new file mode 100644 index 0000000..0e80acf --- /dev/null +++ b/src/patito/_pydantic/column_info.py @@ -0,0 +1,75 @@ +import json +from typing import ( + Any, + Dict, + Optional, + Sequence, + Type, + TypeVar, + Union, +) + +import polars as pl +from polars.datatypes import DataType, DataTypeClass +from pydantic import BaseModel, field_serializer + +from patito._pydantic.dtypes import parse_composite_dtype + + +class ColumnInfo(BaseModel, arbitrary_types_allowed=True): + """patito-side model for storing column metadata + + Args: + constraints (Union[polars.Expression, List[polars.Expression]): A single + constraint or list of constraints, expressed as a polars expression objects. + All rows must satisfy the given constraint. You can refer to the given column + with ``pt.field``, which will automatically be replaced with + ``polars.col()`` before evaluation. + derived_from (Union[str, polars.Expr]): used to mark fields that are meant to be derived from other fields. Users can specify a polars expression that will be called to derive the column value when `pt.DataFrame.derive` is called. + dtype (polars.datatype.DataType): The given dataframe column must have the given + polars dtype, for instance ``polars.UInt64`` or ``pl.Float32``. + unique (bool): All row values must be unique. 
+ """ + + dtype: Optional[Union[DataTypeClass, DataType]] = None + constraints: Optional[Union[pl.Expr, Sequence[pl.Expr]]] = None + derived_from: Optional[Union[str, pl.Expr]] = None + unique: Optional[bool] = None + + @field_serializer("constraints", "derived_from") + def serialize_exprs(self, exprs: str | pl.Expr | Sequence[pl.Expr] | None) -> Any: + if exprs is None: + return None + elif isinstance(exprs, str): + return exprs + elif isinstance(exprs, pl.Expr): + return self._serialize_expr(exprs) + elif isinstance(exprs, Sequence): + return [self._serialize_expr(c) for c in exprs] + else: + raise ValueError(f"Invalid type for exprs: {type(exprs)}") + + def _serialize_expr(self, expr: pl.Expr) -> Dict: + if isinstance(expr, pl.Expr): + return json.loads( + expr.meta.write_json(None) + ) # can we access the dictionary directly? + else: + raise ValueError(f"Invalid type for expr: {type(expr)}") + + @field_serializer("dtype") + def serialize_dtype(self, dtype: DataTypeClass | DataType | None) -> Any: + """ + References + ---------- + [1] https://stackoverflow.com/questions/76572310/how-to-serialize-deserialize-polars-datatypes + """ + if dtype is None: + return None + elif isinstance(dtype, DataTypeClass) or isinstance(dtype, DataType): + return parse_composite_dtype(dtype) + else: + raise ValueError(f"Invalid type for dtype: {type(dtype)}") + + +CI = TypeVar("CI", bound=Type[ColumnInfo]) diff --git a/src/patito/_pydantic/dtypes.py b/src/patito/_pydantic/dtypes.py deleted file mode 100644 index 0a19d7d..0000000 --- a/src/patito/_pydantic/dtypes.py +++ /dev/null @@ -1,356 +0,0 @@ -from enum import Enum -from typing import ( - Any, - Dict, - FrozenSet, - List, - Optional, - Sequence, -) - -import polars as pl -from polars.datatypes import DataType, DataTypeClass, DataTypeGroup, convert -from polars.datatypes.constants import ( - DATETIME_DTYPES, - DURATION_DTYPES, - FLOAT_DTYPES, - INTEGER_DTYPES, -) -from polars.polars import ( - dtype_str_repr, # TODO: this is a 
rust function, can we implement our own string parser for Time/Duration/Datetime? -) -from pydantic import TypeAdapter - -from patito._pydantic.repr import display_as_type - -PYTHON_TO_PYDANTIC_TYPES = { - str: "string", - int: "integer", - float: "number", - bool: "boolean", - type(None): "null", -} - -BOOLEAN_DTYPES = DataTypeGroup([pl.Boolean]) -STRING_DTYPES = DataTypeGroup([pl.Utf8]) -DATE_DTYPES = DataTypeGroup([pl.Date]) -TIME_DTYPES = DataTypeGroup([pl.Time]) - -PT_BASE_SUPPORTED_DTYPES = DataTypeGroup( - INTEGER_DTYPES - | FLOAT_DTYPES - | BOOLEAN_DTYPES - | STRING_DTYPES - | DATE_DTYPES - | DATETIME_DTYPES - | DURATION_DTYPES - | TIME_DTYPES -) - - -class PydanticBaseType(Enum): - STRING = "string" - INTEGER = "integer" - NUMBER = "number" - BOOLEAN = "boolean" - NULL = "null" - OBJECT = "object" - - -class PydanticStringFormat(Enum): - DATE = "date" - DATE_TIME = "date-time" - DURATION = "duration" - TIME = "time" - - -def parse_composite_dtype(dtype: DataTypeClass | DataType) -> str: - """for serialization, converts polars dtype to string representation""" - if dtype in pl.NESTED_DTYPES: - if dtype == pl.Struct or isinstance(dtype, pl.Struct): - raise NotImplementedError("Structs not yet supported by patito") - if not isinstance(dtype, pl.List) or isinstance(dtype, pl.Array): - raise NotImplementedError( - f"Unsupported nested dtype: {dtype} of type {type(dtype)}" - ) - if dtype.inner is None: - return convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype.base_type()] - return f"{convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype.base_type()]}[{parse_composite_dtype(dtype.inner)}]" - elif dtype in pl.TEMPORAL_DTYPES: - return dtype_str_repr(dtype) - else: - return convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype] - - -def dtype_from_string(v: str): - """for deserialization""" - # TODO test all dtypes - return convert.dtype_short_repr_to_dtype(v) - - -def validate_polars_dtype( - annotation: type[Any] | None, - dtype: DataType | DataTypeClass | None, - column: 
Optional[str] = None, -): - """ - Check that the polars dtype is valid for the given annotation. Raises ValueError if not. - - Args: - annotation (type[Any] | None): python type annotation - dtype (DataType | DataTypeClass | None): polars dtype - column (Optional[str], optional): column name. Defaults to None. - """ - if ( - dtype is None or annotation is None - ): # no potential conflict between type annotation and chosen polars type - return - valid_dtypes = valid_polars_dtypes_for_annotation(annotation) - if dtype not in valid_dtypes: - if column: - column_msg = f" for column `{column}`" - else: - column_msg = "" - raise ValueError( - f"Invalid dtype {dtype}{column_msg}. Allowable polars dtypes for {display_as_type(annotation)} are: {', '.join([str(x) for x in valid_dtypes])}." - ) - return - - -def validate_annotation(annotation: type[Any] | None, column: Optional[str] = None): - """ - Check that the provided annotation has polars/patito support (we can resolve it to a default dtype). Raises ValueError if not. - - Args: - annotation (type[Any] | None): python type annotation - column (Optional[str], optional): column name. Defaults to None. - """ - default_dtype = default_polars_dtype_for_annotation(annotation) - if default_dtype is None: - valid_polars_dtypes = valid_polars_dtypes_for_annotation(annotation) - if column: - column_msg = f" for column `{column}`" - else: - column_msg = "" - if len(valid_polars_dtypes) == 0: - raise ValueError( - f"Annotation {display_as_type(annotation)}{column_msg} is not compatible with any polars dtypes." - ) - else: - raise ValueError( - f"Unable to determine default dtype for annotation {display_as_type(annotation)}{column_msg}. Please provide a valid default polars dtype via the `dtype` argument to `Field`. Valid dtypes are: {', '.join([str(x) for x in valid_polars_dtypes])}." 
- ) - return - - -def valid_polars_dtypes_for_annotation( - annotation: type[Any] | None -) -> FrozenSet[DataTypeClass | DataType]: - """Returns a set of polars types that are valid for the given annotation. If the annotation is Any, returns all supported polars dtypes. - - Args: - annotation (type[Any] | None): python type annotation - - Returns: - FrozenSet[DataTypeClass | DataType]: set of polars dtypes - """ - if annotation == Any: - return PT_BASE_SUPPORTED_DTYPES - schema = TypeAdapter(annotation).json_schema() - return _valid_polars_dtypes_for_schema(schema) - - -def default_polars_dtype_for_annotation( - annotation: type[Any] | None -) -> DataTypeClass | DataType | None: - """Returns the default polars dtype for the given annotation. If the annotation is Any, returns pl.Utf8. If no default dtype can be determined, returns None. - - Args: - annotation (type[Any] | None): python type annotation - - Returns: - DataTypeClass | DataType | None: polars dtype - """ - if annotation == Any: - return pl.Utf8 - schema = TypeAdapter(annotation).json_schema() - return _default_polars_dtype_for_schema(schema) - - -def _valid_polars_dtypes_for_schema( - schema: Dict -) -> FrozenSet[DataTypeClass | DataType]: - valid_type_sets = [] - if "anyOf" in schema: - schema = _without_optional(schema) - for sub_props in schema["anyOf"]: - valid_type_sets.append( - set(_pydantic_subschema_to_valid_polars_types(sub_props)) - ) - else: - valid_type_sets.append(set(_pydantic_subschema_to_valid_polars_types(schema))) - return set.intersection(*valid_type_sets) if valid_type_sets else frozenset() # pyright: ignore - - -def _default_polars_dtype_for_schema(schema: Dict) -> DataTypeClass | DataType | None: - if "anyOf" in schema: - if len(schema["anyOf"]) == 2: # look for optionals first - schema = _without_optional(schema) - if len(schema["anyOf"]) == 1: - schema = schema["anyOf"][0] - else: - return None - return _pydantic_subschema_to_default_dtype(schema) - - -def 
_without_optional(schema: Dict) -> Dict: - if "anyOf" in schema: - for sub_props in schema["anyOf"]: - if "type" in sub_props and sub_props["type"] == "null": - schema["anyOf"].remove(sub_props) - return schema - - -def _pydantic_subschema_to_valid_polars_types( - props: Dict, -) -> FrozenSet[DataTypeClass | DataType]: - if "type" not in props: - if "enum" in props: - raise TypeError("Mixed type enums not supported by patito.") - elif "const" in props: - return valid_polars_dtypes_for_annotation(type(props["const"])) - return frozenset() - pyd_type = props.get("type") - if pyd_type == "array": - if "items" not in props: - raise NotImplementedError( - "Unexpected error processing pydantic schema. Please file an issue." - ) - array_props = props["items"] - item_dtypes = _valid_polars_dtypes_for_schema(array_props) - # TODO support pl.Array? - return DataTypeGroup([pl.List(dtype) for dtype in item_dtypes]) - return _pyd_type_to_valid_dtypes( - PydanticBaseType(pyd_type), props.get("format"), props.get("enum") - ) - - -def _pydantic_subschema_to_default_dtype( - props: Dict -) -> DataTypeClass | DataType | None: - if "type" not in props: - if "enum" in props: - raise TypeError("Mixed type enums not supported by patito.") - elif "const" in props: - return default_polars_dtype_for_annotation(type(props["const"])) - return None - pyd_type = props.get("type") - if pyd_type == "array": - if "items" not in props: - raise NotImplementedError( - "Unexpected error processing pydantic schema. Please file an issue." 
- ) - array_props = props["items"] - inner_default_type = _default_polars_dtype_for_schema(array_props) - if inner_default_type is None: - return None - return pl.List(inner_default_type) - return _pyd_type_to_default_dtype( - PydanticBaseType(pyd_type), props.get("format"), props.get("enum") - ) - - -def _pyd_type_to_valid_dtypes( - pyd_type: PydanticBaseType, string_format: Optional[str], enum: List[str] | None -) -> FrozenSet[DataTypeClass | DataType]: - if enum is not None: - _validate_enum_values(pyd_type, enum) - return DataTypeGroup( - [pl.Categorical, pl.Utf8] - ) # TODO use pl.Enum in future polars versions - if pyd_type.value == "integer": - return DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) - elif pyd_type.value == "number": - return FLOAT_DTYPES - elif pyd_type.value == "boolean": - return BOOLEAN_DTYPES - elif pyd_type.value == "string": - _string_format = ( - PydanticStringFormat(string_format) if string_format is not None else None - ) - return _pyd_string_format_to_valid_dtypes(_string_format) - elif pyd_type.value == "null": - return frozenset({pl.Null}) - else: - return frozenset() - - -def _pyd_type_to_default_dtype( - pyd_type: PydanticBaseType, string_format: Optional[str], enum: List[str] | None -) -> DataTypeClass | DataType: - if enum is not None: - _validate_enum_values(pyd_type, enum) - return pl.Categorical - elif pyd_type.value == "integer": - return pl.Int64 - elif pyd_type.value == "number": - return pl.Float64 - elif pyd_type.value == "boolean": - return pl.Boolean - elif pyd_type.value == "string": - _string_format = ( - PydanticStringFormat(string_format) if string_format is not None else None - ) - return _pyd_string_format_to_default_dtype(_string_format) - elif pyd_type.value == "null": - return pl.Null - elif pyd_type.value == "object": - raise ValueError("pydantic object types not currently supported by patito") - else: - raise NotImplementedError - - -def _pyd_string_format_to_valid_dtypes( - string_format: 
PydanticStringFormat | None -) -> FrozenSet[DataTypeClass | DataType]: - if string_format is None: - return STRING_DTYPES - elif string_format.value == "date": - return DATE_DTYPES - elif string_format.value == "date-time": - return DATETIME_DTYPES - elif string_format.value == "duration": - return DURATION_DTYPES - elif string_format.value == "time": - return TIME_DTYPES - else: - raise NotImplementedError - - -def _pyd_string_format_to_default_dtype( - string_format: PydanticStringFormat | None -) -> DataTypeClass | DataType: - if string_format is None: - return pl.Utf8 - elif string_format.value == "date": - return pl.Date - elif string_format.value == "date-time": - return pl.Datetime - elif string_format.value == "duration": - return pl.Duration - elif string_format.value == "time": - return pl.Time - else: - raise NotImplementedError - - -def _validate_enum_values(pyd_type: PydanticBaseType, enum: Sequence): - enum_types = set(type(value) for value in enum) - if len(enum_types) > 1: - raise TypeError( - f"All enumerated values of enums used to annotate Patito model fields must have the same type. Encountered types: {sorted(map(lambda t: t.__name__, enum_types))}." - ) - if pyd_type.value != "string": - raise TypeError( - f"Enums used to annotate Patito model fields must be strings. Encountered type: {enum_types.pop().__name__}." 
- ) diff --git a/src/patito/_pydantic/dtypes/__init__.py b/src/patito/_pydantic/dtypes/__init__.py new file mode 100644 index 0000000..2ac6057 --- /dev/null +++ b/src/patito/_pydantic/dtypes/__init__.py @@ -0,0 +1,23 @@ +from patito._pydantic.dtypes.dtypes import ( + DtypeResolver, + default_dtypes_for_model, + valid_dtypes_for_model, + validate_annotation, + validate_polars_dtype, +) +from patito._pydantic.dtypes.utils import ( + PYTHON_TO_PYDANTIC_TYPES, + dtype_from_string, + parse_composite_dtype, +) + +__all__ = [ + "DtypeResolver", + "validate_annotation", + "validate_polars_dtype", + "parse_composite_dtype", + "dtype_from_string", + "valid_dtypes_for_model", + "default_dtypes_for_model", + "PYTHON_TO_PYDANTIC_TYPES", +] diff --git a/src/patito/_pydantic/dtypes/dtypes.py b/src/patito/_pydantic/dtypes/dtypes.py new file mode 100644 index 0000000..4a99676 --- /dev/null +++ b/src/patito/_pydantic/dtypes/dtypes.py @@ -0,0 +1,248 @@ +from __future__ import annotations + +from functools import cache, reduce +from operator import and_ +from typing import TYPE_CHECKING, Any, Dict, FrozenSet, Mapping, Optional, Type + +import polars as pl +from polars.datatypes import DataType, DataTypeClass, DataTypeGroup +from pydantic import TypeAdapter + +from patito._pydantic.dtypes.utils import ( + PT_BASE_SUPPORTED_DTYPES, + PydanticBaseType, + _pyd_type_to_default_dtype, + _pyd_type_to_valid_dtypes, + _without_optional, + dtype_from_string, +) +from patito._pydantic.repr import display_as_type + +if TYPE_CHECKING: + from patito.pydantic import ModelType + + +@cache +def valid_dtypes_for_model(cls: Type[ModelType]) -> Mapping[str, FrozenSet[DataTypeClass]]: + return { + column: DtypeResolver(cls.model_fields[column].annotation).valid_polars_dtypes() + if cls.column_infos[column].dtype is None + else DataTypeGroup([cls.dtypes[column]], match_base_type=False) + for column in cls.columns + } + + +@cache +def default_dtypes_for_model( + cls: Type[ModelType], +) -> dict[str, 
DataTypeClass | DataType]: + default_dtypes = {} + for column in cls.columns: + dtype = cls.column_infos[column].dtype + if dtype is None: + default_dtype = DtypeResolver( + cls.model_fields[column].annotation + ).default_polars_dtype() + if default_dtype is None: + raise ValueError( + f"Unable to find a default dtype for column `{column}`" + ) + else: + default_dtypes[column] = default_dtype + else: + default_dtypes[column] = dtype + return default_dtypes + + +def validate_polars_dtype( + annotation: type[Any] | None, + dtype: DataType | DataTypeClass | None, + column: Optional[str] = None, +) -> None: + """ + Check that the polars dtype is valid for the given annotation. Raises ValueError if not. + Args: + annotation (type[Any] | None): python type annotation + dtype (DataType | DataTypeClass | None): polars dtype + column (Optional[str], optional): column name. Defaults to None. + """ + if ( + dtype is None or annotation is None + ): # no potential conflict between type annotation and chosen polars type + return + valid_dtypes = DtypeResolver(annotation).valid_polars_dtypes() + if dtype not in valid_dtypes: + if column: + column_msg = f" for column `{column}`" + else: + column_msg = "" + raise ValueError( + f"Invalid dtype {dtype}{column_msg}. Allowable polars dtypes for {display_as_type(annotation)} are: {', '.join([str(x) for x in valid_dtypes])}." + ) + return + + +def validate_annotation( + annotation: type[Any] | Any | None, column: Optional[str] = None +) -> None: + """ + Check that the provided annotation has polars/patito support (we can resolve it to a default dtype). Raises ValueError if not. + Args: + annotation (type[Any] | None): python type annotation + column (Optional[str], optional): column name. Defaults to None. 
+ """ + default_dtype = DtypeResolver(annotation).default_polars_dtype() + if default_dtype is None: + valid_polars_dtypes = DtypeResolver(annotation).valid_polars_dtypes() + if column: + column_msg = f" for column `{column}`" + else: + column_msg = "" + if len(valid_polars_dtypes) == 0: + raise ValueError( + f"Annotation {display_as_type(annotation)}{column_msg} is not compatible with any polars dtypes." + ) + else: + raise ValueError( + f"Unable to determine default dtype for annotation {display_as_type(annotation)}{column_msg}. Please provide a valid default polars dtype via the `dtype` argument to `Field`. Valid dtypes are: {', '.join([str(x) for x in valid_polars_dtypes])}." + ) + return + + +class DtypeResolver: + def __init__(self, annotation: Any | None): + self.annotation = annotation + self.schema = TypeAdapter(annotation).json_schema() + self.defs = self.schema.get("$defs", {}) + + def valid_polars_dtypes(self) -> DataTypeGroup: + if self.annotation == Any: + return PT_BASE_SUPPORTED_DTYPES + return self._valid_polars_dtypes_for_schema(self.schema) + + def default_polars_dtype(self) -> DataTypeClass | DataType | None: + if self.annotation == Any: + return pl.Utf8 + return self._default_polars_dtype_for_schema(self.schema) + + def _valid_polars_dtypes_for_schema( + self, + schema: Dict, + ) -> DataTypeGroup: + valid_type_sets = [] + if "anyOf" in schema: + schema = _without_optional(schema) + for sub_props in schema["anyOf"]: + valid_type_sets.append( + self._pydantic_subschema_to_valid_polars_types(sub_props) + ) + else: + valid_type_sets.append( + self._pydantic_subschema_to_valid_polars_types(schema) + ) + return reduce(and_, valid_type_sets) if valid_type_sets else DataTypeGroup([]) + + def _pydantic_subschema_to_valid_polars_types( + self, + props: Dict, + ) -> DataTypeGroup: + if "type" not in props: + if "enum" in props: + raise TypeError("Mixed type enums not supported by patito.") + elif "const" in props: + return 
DtypeResolver(type(props["const"])).valid_polars_dtypes() + elif "$ref" in props: + return self._pydantic_subschema_to_valid_polars_types( + self.defs[props["$ref"].split("/")[-1]] + ) + return DataTypeGroup([]) + pyd_type = props.get("type") + if pyd_type == "array": + if "items" not in props: + return DataTypeGroup([]) + array_props = props["items"] + item_dtypes = self._valid_polars_dtypes_for_schema(array_props) + # TODO support pl.Array? + return DataTypeGroup( + [pl.List(dtype) for dtype in item_dtypes], match_base_type=False + ) + elif pyd_type == "object": + if "properties" not in props: + return DataTypeGroup([]) + object_props = props["properties"] + return DataTypeGroup( + [ + pl.Struct( + [ + pl.Field( + name, self._default_polars_dtype_for_schema(sub_props) + ) + for name, sub_props in object_props.items() + ] + ) + ], + match_base_type=False, + ) # for structs, return only the default dtype set to avoid combinatoric issues + return _pyd_type_to_valid_dtypes( + PydanticBaseType(pyd_type), props.get("format"), props.get("enum") + ) + + def _default_polars_dtype_for_schema( + self, schema: Dict + ) -> DataTypeClass | DataType | None: + if "anyOf" in schema: + if len(schema["anyOf"]) == 2: # look for optionals first + schema = _without_optional(schema) + if len(schema["anyOf"]) == 1: + if "column_info" in schema: + schema["anyOf"][0]["column_info"] = schema[ + "column_info" + ] # push column info through optional + schema = schema["anyOf"][0] + else: + return None + return self._pydantic_subschema_to_default_dtype(schema) + + def _pydantic_subschema_to_default_dtype( + self, + props: Dict, + ) -> DataTypeClass | DataType | None: + if "column_info" in props: # user has specified in patito model + if props["column_info"]["dtype"] is not None: + return dtype_from_string(props["column_info"]["dtype"]) + if "type" not in props: + if "enum" in props: + raise TypeError("Mixed type enums not supported by patito.") + elif "const" in props: + return 
DtypeResolver(type(props["const"])).default_polars_dtype() + elif "$ref" in props: + return self._pydantic_subschema_to_default_dtype( + self.defs[props["$ref"].split("/")[-1]] + ) + return None + pyd_type = props.get("type") + if pyd_type == "array": + if "items" not in props: + raise NotImplementedError( + "Unexpected error processing pydantic schema. Please file an issue." + ) + array_props = props["items"] + inner_default_type = self._default_polars_dtype_for_schema(array_props) + if inner_default_type is None: + return None + return pl.List(inner_default_type) + elif pyd_type == "object": + if "properties" not in props: + raise NotImplementedError( + "dictionaries not currently supported by patito" + ) + object_props = props["properties"] + return pl.Struct( + [ + pl.Field(name, self._default_polars_dtype_for_schema(sub_props)) + for name, sub_props in object_props.items() + ] + ) + return _pyd_type_to_default_dtype( + PydanticBaseType(pyd_type), props.get("format"), props.get("enum") + ) diff --git a/src/patito/_pydantic/dtypes/utils.py b/src/patito/_pydantic/dtypes/utils.py new file mode 100644 index 0000000..5075f33 --- /dev/null +++ b/src/patito/_pydantic/dtypes/utils.py @@ -0,0 +1,202 @@ +from __future__ import annotations + +from enum import Enum +from typing import ( + Any, + Dict, + List, + Optional, + Sequence, + Union, + cast, +) + +import polars as pl +from polars.datatypes import DataType, DataTypeClass, DataTypeGroup, convert +from polars.datatypes.constants import ( + DATETIME_DTYPES, + DURATION_DTYPES, + FLOAT_DTYPES, + INTEGER_DTYPES, +) +from polars.polars import ( + dtype_str_repr, # TODO: this is a rust function, can we implement our own string parser for Time/Duration/Datetime? 
+) + +PYTHON_TO_PYDANTIC_TYPES = { + str: "string", + int: "integer", + float: "number", + bool: "boolean", + type(None): "null", +} + +BOOLEAN_DTYPES = DataTypeGroup([pl.Boolean]) +STRING_DTYPES = DataTypeGroup([pl.Utf8]) +DATE_DTYPES = DataTypeGroup([pl.Date]) +TIME_DTYPES = DataTypeGroup([pl.Time]) + +PT_BASE_SUPPORTED_DTYPES = DataTypeGroup( + INTEGER_DTYPES + | FLOAT_DTYPES + | BOOLEAN_DTYPES + | STRING_DTYPES + | DATE_DTYPES + | DATETIME_DTYPES + | DURATION_DTYPES + | TIME_DTYPES +) + + +class PydanticBaseType(Enum): + STRING = "string" + INTEGER = "integer" + NUMBER = "number" + BOOLEAN = "boolean" + NULL = "null" + OBJECT = "object" + + +class PydanticStringFormat(Enum): + DATE = "date" + DATE_TIME = "date-time" + DURATION = "duration" + TIME = "time" + + +def parse_composite_dtype(dtype: DataTypeClass | DataType) -> str: + """for serialization, converts polars dtype to string representation""" + if dtype in pl.NESTED_DTYPES: + if dtype == pl.Struct or isinstance(dtype, pl.Struct): + raise NotImplementedError("Structs not yet supported by patito") + if not isinstance(dtype, pl.List) or isinstance(dtype, pl.Array): + raise NotImplementedError( + f"Unsupported nested dtype: {dtype} of type {type(dtype)}" + ) + if dtype.inner is None: + return convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype.base_type()] + return f"{convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype.base_type()]}[{parse_composite_dtype(dtype.inner)}]" + elif dtype in pl.TEMPORAL_DTYPES: + return cast(str, dtype_str_repr(dtype)) + else: + return convert.DataTypeMappings.DTYPE_TO_FFINAME[dtype] + + +def dtype_from_string(v: str) -> Optional[Union[DataTypeClass, DataType]]: + """for deserialization""" + # TODO test all dtypes + return convert.dtype_short_repr_to_dtype(v) + + +def _pyd_type_to_valid_dtypes( + pyd_type: PydanticBaseType, string_format: Optional[str], enum: List[str] | None +) -> DataTypeGroup: + if enum is not None: + _validate_enum_values(pyd_type, enum) + return 
DataTypeGroup([pl.Enum(enum), pl.Utf8], match_base_type=False) + if pyd_type.value == "integer": + return DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) + elif pyd_type.value == "number": + return ( + FLOAT_DTYPES + if isinstance(FLOAT_DTYPES, DataTypeGroup) + else DataTypeGroup(FLOAT_DTYPES) + ) + elif pyd_type.value == "boolean": + return BOOLEAN_DTYPES + elif pyd_type.value == "string": + _string_format = ( + PydanticStringFormat(string_format) if string_format is not None else None + ) + return _pyd_string_format_to_valid_dtypes(_string_format) + elif pyd_type.value == "null": + return DataTypeGroup([pl.Null]) + else: + return DataTypeGroup([]) + + +def _pyd_type_to_default_dtype( + pyd_type: PydanticBaseType, string_format: Optional[str], enum: List[str] | None +) -> DataTypeClass | DataType: + if enum is not None: + _validate_enum_values(pyd_type, enum) + return pl.Enum(enum) + elif pyd_type.value == "integer": + return pl.Int64() + elif pyd_type.value == "number": + return pl.Float64() + elif pyd_type.value == "boolean": + return pl.Boolean() + elif pyd_type.value == "string": + _string_format = ( + PydanticStringFormat(string_format) if string_format is not None else None + ) + return _pyd_string_format_to_default_dtype(_string_format) + elif pyd_type.value == "null": + return pl.Null() + elif pyd_type.value == "object": + raise ValueError("pydantic object types not currently supported by patito") + else: + raise NotImplementedError + + +def _pyd_string_format_to_valid_dtypes( + string_format: PydanticStringFormat | None, +) -> DataTypeGroup: + if string_format is None: + return STRING_DTYPES + elif string_format.value == "date": + return DATE_DTYPES + elif string_format.value == "date-time": + return ( + DATETIME_DTYPES + if isinstance(DATE_DTYPES, DataTypeGroup) + else DataTypeGroup(DATE_DTYPES) + ) + elif string_format.value == "duration": + return ( + DURATION_DTYPES + if isinstance(DURATION_DTYPES, DataTypeGroup) + else DataTypeGroup(DURATION_DTYPES) + ) 
+ elif string_format.value == "time": + return TIME_DTYPES + else: + raise NotImplementedError + + +def _pyd_string_format_to_default_dtype( + string_format: PydanticStringFormat | None, +) -> DataTypeClass | DataType: + if string_format is None: + return pl.Utf8() + elif string_format.value == "date": + return pl.Date() + elif string_format.value == "date-time": + return pl.Datetime() + elif string_format.value == "duration": + return pl.Duration() + elif string_format.value == "time": + return pl.Time() + else: + raise NotImplementedError + + +def _without_optional(schema: Dict) -> Dict: + if "anyOf" in schema: + for sub_props in schema["anyOf"]: + if "type" in sub_props and sub_props["type"] == "null": + schema["anyOf"].remove(sub_props) + return schema + + +def _validate_enum_values(pyd_type: PydanticBaseType, enum: Sequence[Any]) -> None: + enum_types = set(type(value) for value in enum) + if len(enum_types) > 1: + raise TypeError( + f"All enumerated values of enums used to annotate Patito model fields must have the same type. Encountered types: {sorted(map(lambda t: t.__name__, enum_types))}." + ) + if pyd_type.value != "string": + raise TypeError( + f"Enums used to annotate Patito model fields must be strings. Encountered type: {enum_types.pop().__name__}." 
+ ) diff --git a/src/patito/_pydantic/repr.py b/src/patito/_pydantic/repr.py index f245b97..65ce412 100644 --- a/src/patito/_pydantic/repr.py +++ b/src/patito/_pydantic/repr.py @@ -10,6 +10,7 @@ Optional, Sequence, Tuple, + Type, Union, get_args, get_origin, @@ -37,7 +38,7 @@ if sys.version_info < (3, 10): - def origin_is_union(tp: type[Any] | None) -> bool: + def origin_is_union(tp: Optional[Type[Any]]) -> bool: return tp is typing.Union WithArgsTypes = (TypingGenericAlias,) diff --git a/src/patito/_pydantic/schema.py b/src/patito/_pydantic/schema.py new file mode 100644 index 0000000..bb5739a --- /dev/null +++ b/src/patito/_pydantic/schema.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +from functools import cache +from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Type, cast, get_args + +from pydantic.fields import FieldInfo + +from patito._pydantic.column_info import ColumnInfo +from patito._pydantic.dtypes import PYTHON_TO_PYDANTIC_TYPES + +if TYPE_CHECKING: + from patito.pydantic import ModelType + + +@cache +def schema_for_model(cls: Type[ModelType]) -> Dict[str, Dict[str, Any]]: + """ + Return schema properties where definition references have been resolved. + + Returns: + Field information as a dictionary where the keys are field names and the + values are dictionaries containing metadata information about the field + itself. + + Raises: + TypeError: if a field is annotated with an enum where the values are of + different types. 
+ """ + schema = cls.model_json_schema(by_alias=False, ref_template="{model}") + fields = {} + # first resolve definitions for nested models TODO checks for one-way references, if models are self-referencing this falls apart with recursion depth error + for f in cls.model_fields.values(): + annotation = f.annotation + cls._update_dfn(annotation, schema) + for a in get_args(annotation): + cls._update_dfn(a, schema) + for field_name, field_info in schema["properties"].items(): + fields[field_name] = _append_field_info_to_props( + field_info=field_info, + field_name=field_name, + required=field_name in schema.get("required", set()), + model_schema=schema, + ) + schema["properties"] = fields + return schema + + +@cache +def column_infos_for_model(cls: Type[ModelType]) -> Mapping[str, ColumnInfo]: + fields = cls.model_fields + + def get_column_info(field: FieldInfo) -> ColumnInfo: + if field.json_schema_extra is None: + return cast(ColumnInfo, cls.column_info_class()) + elif callable(field.json_schema_extra): + raise NotImplementedError( + "Callable json_schema_extra not supported by patito." + ) + return cast(ColumnInfo, field.json_schema_extra["column_info"]) + + return {k: get_column_info(v) for k, v in fields.items()} + + +def _append_field_info_to_props( + field_info: Dict[str, Any], + field_name: str, + model_schema: Dict[str, Any], + required: Optional[bool] = None, +) -> Dict[str, Any]: + if "$ref" in field_info: # TODO onto runtime append + definition = model_schema["$defs"][field_info["$ref"]] + if "enum" in definition and "type" not in definition: + enum_types = set(type(value) for value in definition["enum"]) + if len(enum_types) > 1: + raise TypeError( + "All enumerated values of enums used to annotate " + "Patito model fields must have the same type. " + "Encountered types: " + f"{sorted(map(lambda t: t.__name__, enum_types))}." 
+ ) + enum_type = enum_types.pop() + definition["type"] = PYTHON_TO_PYDANTIC_TYPES[enum_type] + field = definition + else: + field = field_info + if "items" in field_info: + field["items"] = _append_field_info_to_props( + field_info=field_info["items"], + field_name=field_name, + model_schema=model_schema, + ) + if required is not None: + field["required"] = required + if "const" in field_info and "type" not in field_info: + field["type"] = PYTHON_TO_PYDANTIC_TYPES[type(field_info["const"])] + return field diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index 893ed66..41d7a79 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -173,7 +173,7 @@ class MissingColumnsError(WrongColumnsError): """Exception for when a dataframe is missing one or more columns.""" -class SuperflousColumnsError(WrongColumnsError): +class SuperfluousColumnsError(WrongColumnsError): """Exception for when a dataframe has one ore more non-specified columns.""" diff --git a/src/patito/polars.py b/src/patito/polars.py index 34ad561..f7754ab 100644 --- a/src/patito/polars.py +++ b/src/patito/polars.py @@ -12,6 +12,10 @@ TypeVar, Union, cast, + Dict, + Sequence, + Mapping, + Tuple ) import polars as pl @@ -20,6 +24,7 @@ from typing_extensions import Literal from patito.exceptions import MultipleRowsReturned, RowDoesNotExist +from patito._pydantic.column_info import ColumnInfo if TYPE_CHECKING: import numpy as np @@ -60,7 +65,7 @@ def _construct_lazyframe_model_class( return cls new_class = type( - f"{model.model_json_schema()['title']}LazyFrame", + f"{model.__name__}LazyFrame", (cls,), {"model": model}, ) @@ -84,6 +89,79 @@ def collect( cls = DataFrame return cls._from_pydf(df._df) + def derive(self: LDF, columns: list[str] | None = None) -> LDF: + derived_columns = [] + props = self.model._schema_properties() + original_columns = set(self.columns) + to_derive = self.model.derived_columns if columns is None else columns + for column_name in to_derive: + if 
column_name not in derived_columns: + self, _derived_columns = self._derive_column( + self, column_name, self.model.column_infos + ) + derived_columns.extend(_derived_columns) + out_cols = [ + x for x in props if x in original_columns.union(to_derive) + ] # ensure that model columns are first and in the correct order + out_cols += [ + x for x in original_columns.union(to_derive) if x not in out_cols + ] # collect columns originally in data frame that are not in the model and append to end of df + return self.select(out_cols) + + def _derive_column( + self, + df: LDF, + column_name: str, + column_infos: Dict[str, ColumnInfo], + ) -> Tuple[LDF, Sequence[str]]: + if ( + column_infos.get(column_name, None) is None + or column_infos[column_name].derived_from is None + ): + return df, [] + derived_from = column_infos[column_name].derived_from + dtype = self.model.dtypes[column_name] + derived_columns = [] + if isinstance(derived_from, str): + df = df.with_columns(pl.col(derived_from).cast(dtype).alias(column_name)) + elif isinstance(derived_from, pl.Expr): + root_cols = derived_from.meta.root_names() + while root_cols: + root_col = root_cols.pop() + df, _derived_columns = self._derive_column(df, root_col, column_infos) + derived_columns.extend(_derived_columns) + df = df.with_columns(derived_from.cast(dtype).alias(column_name)) + else: + raise TypeError( + "Can not derive dataframe column from type " f"{type(derived_from)}." 
+ ) + derived_columns.append(column_name) + return df, derived_columns + + def cast( + self: LDF, strict: bool = False, columns: Optional[Sequence[str]] = None + ) -> LDF: + properties = self.model._schema_properties() + valid_dtypes = self.model.valid_dtypes + default_dtypes = self.model.dtypes + columns = columns or self.columns + exprs = [] + for column, current_dtype in zip(self.columns, self.dtypes): + if (column not in columns) or (column not in properties): + exprs.append(pl.col(column)) + elif "dtype" in properties[column]: + exprs.append(pl.col(column).cast(properties[column]["dtype"])) + elif not strict and current_dtype in valid_dtypes[column]: + exprs.append(pl.col(column)) + else: + exprs.append(pl.col(column).cast(default_dtypes[column])) + return self.with_columns(exprs) + + @classmethod + def from_existing(cls: Type[LDF], lf: pl.LazyFrame) -> LDF: + """Constructs a patito.DataFrame object from an existing polars.DataFrame object""" + return cls.model.LazyFrame._from_pyldf(lf._ldf).cast() + class DataFrame(pl.DataFrame, Generic[ModelType]): """ @@ -220,7 +298,7 @@ def set_model(self, model): # type: ignore[no-untyped-def] # noqa: ANN001, ANN2 cls._from_pydf(self._df), ) - def cast(self: DF, strict: bool = False) -> DF: + def cast(self: DF, strict: bool = False, columns: Optional[Sequence[str]] = None) -> DF: """ Cast columns to `dtypes` specified by the associated Patito model. 
@@ -257,20 +335,7 @@ def cast(self: DF, strict: bool = False) -> DF: │ apple ┆ 8 │ └───────┴────────────┘ """ - properties = self.model._schema_properties() - valid_dtypes = self.model.valid_dtypes - default_dtypes = self.model.dtypes - columns = [] - for column, current_dtype in zip(self.columns, self.dtypes): - if column not in properties: - columns.append(pl.col(column)) - elif "dtype" in properties[column]: - columns.append(pl.col(column).cast(properties[column]["dtype"])) - elif not strict and current_dtype in valid_dtypes[column]: - columns.append(pl.col(column)) - else: - columns.append(pl.col(column).cast(default_dtypes[column])) - return self.with_columns(columns) + return self.lazy().cast(strict=strict, columns=columns).collect() def drop( self: DF, @@ -370,7 +435,7 @@ def validate(self: DF) -> DF: self.model.validate(dataframe=self) return self - def derive(self: DF) -> DF: + def derive(self: DF, columns: list[str] | None = None) -> DF: """ Populate columns which have ``pt.Field(derived_from=...)`` definitions. @@ -405,23 +470,7 @@ def derive(self: DF) -> DF: │ 2 ┆ 2 ┆ 4 │ └─────┴─────┴────────────┘ """ - df = self.lazy() - for column_name, info in self.model.column_infos.items(): - if info.derived_from is not None: - derived_from = info.derived_from - dtype = self.model.dtypes[column_name] - if isinstance(derived_from, str): - df = df.with_columns( - pl.col(derived_from).cast(dtype).alias(column_name) - ) - elif isinstance(derived_from, pl.Expr): - df = df.with_columns(derived_from.cast(dtype).alias(column_name)) - else: - raise TypeError( - "Can not derive dataframe column from type " - f"{type(derived_from)}." 
- ) - return cast(DF, df.collect()) + return cast(DF, self.lazy().derive(columns=columns).collect()) def fill_null( self: DF, diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index e511f5f..f6093cb 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -2,17 +2,18 @@ from __future__ import annotations import itertools -import json from collections.abc import Iterable -from datetime import date, datetime +from datetime import date, datetime, time, timedelta from typing import ( TYPE_CHECKING, Any, ClassVar, Dict, FrozenSet, + Generic, List, Literal, + Mapping, Optional, Sequence, Tuple, @@ -34,14 +35,17 @@ from pydantic._internal._model_construction import ( ModelMetaclass as PydanticModelMetaclass, ) +from zoneinfo import ZoneInfo +from patito._pydantic.column_info import CI, ColumnInfo from patito._pydantic.dtypes import ( - default_polars_dtype_for_annotation, - parse_composite_dtype, - valid_polars_dtypes_for_annotation, + default_dtypes_for_model, + dtype_from_string, + valid_dtypes_for_model, validate_annotation, validate_polars_dtype, ) +from patito._pydantic.schema import column_infos_for_model, schema_for_model from patito.polars import DataFrame, LazyFrame from patito.validators import validate @@ -60,30 +64,15 @@ ModelType = TypeVar("ModelType", bound="Model") -# A mapping from pydantic types to the equivalent type used in DuckDB -PYDANTIC_TO_DUCKDB_TYPES = { - "integer": "BIGINT", - "string": "VARCHAR", - "number": "DOUBLE", - "boolean": "BOOLEAN", -} - -PYTHON_TO_PYDANTIC_TYPES = { - str: "string", - int: "integer", - float: "number", - bool: "boolean", - type(None): "null", -} - - -class ModelMetaclass(PydanticModelMetaclass): +class ModelMetaclass(PydanticModelMetaclass, Generic[CI]): """ Metclass used by patito.Model. Responsible for setting any relevant model-dependent class properties. 
""" + column_info_class: ClassVar[Type[ColumnInfo]] = ColumnInfo + if TYPE_CHECKING: model_fields: ClassVar[Dict[str, fields.FieldInfo]] @@ -106,40 +95,29 @@ def __init__(cls, name: str, bases: tuple, clsdict: dict, **kwargs) -> None: cls.LazyFrame = LazyFrame._construct_lazyframe_model_class( model=cls, # type: ignore ) - for column in cls.columns: # pyright: ignore TODO why is this needed? - col_info = cls.column_infos[column] - field_info = cls.model_fields[column] - if col_info.dtype: - validate_polars_dtype( - annotation=field_info.annotation, dtype=col_info.dtype - ) - else: - validate_annotation(field_info.annotation) - @property # TODO try cache - def column_infos(cls) -> Dict[str, ColumnInfo]: - """helper method for extracting patito-specific ColumnInfo objects from `model_fields` + def __hash__(self) -> int: + return super().__hash__() - Returns: - Dict[str, ColumnInfo]: dictionary mapping column names to patito-specific column metadata + @property + def column_infos(cls: Type[ModelType]) -> Mapping[str, ColumnInfo]: + return column_infos_for_model(cls) + @property + def model_schema(cls: Type[ModelType]) -> Mapping[str, Mapping[str, Any]]: """ - fields = cls.model_fields - - def get_column_info(field: fields.FieldInfo) -> ColumnInfo: - if field.json_schema_extra is None: - return ColumnInfo() - elif callable(field.json_schema_extra): - raise NotImplementedError( - "Callable json_schema_extra not supported by patito." - ) - return field.json_schema_extra["column_info"] # pyright: ignore # TODO JsonDict fix + Return schema properties where definition references have been resolved. - return {k: get_column_info(v) for k, v in fields.items()} + Returns: + Field information as a dictionary where the keys are field names and the + values are dictionaries containing metadata information about the field + itself. 
- # @property - # def model_fields(cls) -> Dict[str, fields.FieldInfo]: - # return cls.model_fields + Raises: + TypeError: if a field is annotated with an enum where the values are of + different types. + """ + return schema_for_model(cls) @property def columns(cls: Type[ModelType]) -> List[str]: @@ -183,27 +161,12 @@ def dtypes( # type: ignore >>> Product.dtypes {'name': Utf8, 'ideal_temperature': Int64, 'price': Float64} """ - default_dtypes = {} - for column in cls.columns: - dtype = cls.column_infos[column].dtype - if dtype is None: - default_dtype = default_polars_dtype_for_annotation( - cls.model_fields[column].annotation - ) - if default_dtype is None: - raise ValueError( - f"Unable to find a default dtype for column `{column}`" - ) - else: - default_dtypes[column] = default_dtype - else: - default_dtypes[column] = dtype - return default_dtypes + return default_dtypes_for_model(cls) @property def valid_dtypes( # type: ignore cls: Type[ModelType], # pyright: ignore - ) -> dict[str, FrozenSet[DataTypeClass | DataType]]: + ) -> Mapping[str, FrozenSet[DataTypeClass | DataType]]: """ Return a list of polars dtypes which Patito considers valid for each field. 
@@ -232,16 +195,8 @@ def valid_dtypes( # type: ignore 'int_column': [Int64, Int32, Int16, Int8, UInt64, UInt32, UInt16, UInt8], 'str_column': [Utf8]} """ + return valid_dtypes_for_model(cls) - return { - column: valid_polars_dtypes_for_annotation( - cls.model_fields[column].annotation - ) - if cls.column_infos[column].dtype is None - else frozenset({cls.column_infos[column].dtype}) - for column in cls.columns - } # pyright: ignore - @property def defaults( # type: ignore cls: Type[ModelType], # pyright: ignore @@ -293,7 +248,10 @@ def non_nullable_columns( # type: ignore return set( k for k in cls.columns - if type(None) not in get_args(cls.model_fields[k].annotation) + if not ( + type(None) in get_args(cls.model_fields[k].annotation) + or cls.model_fields[k].annotation == type(None) + ) ) @property @@ -345,10 +303,32 @@ def unique_columns( # type: ignore infos = cls.column_infos return {column for column in cls.columns if infos[column].unique} + @property + def derived_columns( + cls: Type[ModelType], # type: ignore[misc] + ) -> set[str]: + infos = cls.column_infos + return { + column for column in cls.columns if infos[column].derived_from is not None + } + class Model(BaseModel, metaclass=ModelMetaclass): """Custom pydantic class for representing table schema and constructing rows.""" + @classmethod + def validate_schema(cls: Type[ModelType]): + """Users should run this after defining or edit a model. 
We withhold the checks at model definition time to avoid expensive queries of the model schema""" + for column in cls.columns: + col_info = cls.column_infos[column] + field_info = cls.model_fields[column] + if col_info.dtype: + validate_polars_dtype( + annotation=field_info.annotation, dtype=col_info.dtype + ) + else: + validate_annotation(field_info.annotation) + @classmethod def from_row( cls: Type[ModelType], # type: ignore[misc] @@ -507,8 +487,9 @@ def validate( @classmethod def example_value( # noqa: C901 cls, - field: str, - ) -> Union[date, datetime, float, int, str, None]: + field: Optional[str] = None, + properties: Optional[Dict[str, Any]] = None, + ) -> Union[date, datetime, time, timedelta, float, int, str, None, Mapping, List]: """ Return a valid example value for the given model field. @@ -537,9 +518,21 @@ def example_value( # noqa: C901 >>> Product.example_value("temperature_zone") 'dry' """ - field_data = cls._schema_properties() - properties = field_data[field] - info = cls.column_infos[field] + if field is None and properties is None: + raise ValueError( + "Either 'field' or 'properties' must be provided as argument." + ) + if field is not None and properties is not None: + raise ValueError( + "Only one of 'field' or 'properties' can be provided as argument." + ) + if field: + properties = cls._schema_properties()[field] + info = cls.column_infos[field] + else: + info = cls.column_info_class() + properties = properties or {} + if "type" in properties: field_type = properties["type"] elif "anyOf" in properties: @@ -549,7 +542,10 @@ def example_value( # noqa: C901 else: field_type = allowable[0] else: - raise NotImplementedError + raise NotImplementedError( + f"Field type for {properties['title']} not found." 
+ ) + if "const" in properties: # The default value is the only valid value, provided as const return properties["const"] @@ -558,7 +554,7 @@ def example_value( # noqa: C901 # A default value has been specified in the model field definition return properties["default"] - elif not properties["required"]: + elif not properties.get("required", True): return None elif field_type == "null": @@ -609,7 +605,17 @@ def example_value( # noqa: C901 elif "format" in properties and properties["format"] == "date": return date(year=1970, month=1, day=1) elif "format" in properties and properties["format"] == "date-time": + if "column_info" in properties: + dtype_str = properties["column_info"]["dtype"] + dtype = dtype_from_string(dtype_str) + return datetime( + year=1970, month=1, day=1, tzinfo=ZoneInfo(dtype.time_zone) + ) return datetime(year=1970, month=1, day=1) + elif "format" in properties and properties["format"] == "time": + return time(12, 30) + elif "format" in properties and properties["format"] == "duration": + return timedelta(1) elif "minLength" in properties: return "a" * properties["minLength"] elif "maxLength" in properties: @@ -620,6 +626,18 @@ def example_value( # noqa: C901 elif field_type == "boolean": return False + elif field_type == "object": + try: + props_o = cls.model_schema["$defs"][properties["title"]]["properties"] + return {f: cls.example_value(properties=props_o[f]) for f in props_o} + except AttributeError: + raise NotImplementedError( + "Nested example generation only supported for nested pt.Model classes." 
+ ) + + elif field_type == "array": + return [cls.example_value(properties=properties["items"])] + else: # pragma: no cover raise NotImplementedError @@ -829,7 +847,9 @@ def examples( ) else: example_value = cls.example_value(field=column_name) - series.append(pl.lit(example_value, dtype=dtype).alias(column_name)) + series.append( + pl.Series(column_name, values=[example_value], dtype=dtype) + ) continue value = kwargs.get(column_name) @@ -1124,48 +1144,17 @@ def with_fields( field_mapping=fields, ) - @classmethod # TODO reduce references to this in favor of ColumnInfo/FieldInfo - def _schema_properties(cls) -> Dict[str, Dict[str, Any]]: - """ - Return schema properties where definition references have been resolved. - - Returns: - Field information as a dictionary where the keys are field names and the - values are dictionaries containing metadata information about the field - itself. - - Raises: - TypeError: if a field is annotated with an enum where the values are of - different types. - """ - schema = cls.model_json_schema(ref_template="{model}") - required = schema.get("required", set()) - fields = {} - for field_name, field_info in schema["properties"].items(): - if "$ref" in field_info: - definition = schema["$defs"][field_info["$ref"]] - if "enum" in definition and "type" not in definition: - enum_types = set(type(value) for value in definition["enum"]) - if len(enum_types) > 1: - raise TypeError( - "All enumerated values of enums used to annotate " - "Patito model fields must have the same type. " - "Encountered types: " - f"{sorted(map(lambda t: t.__name__, enum_types))}." - ) - enum_type = enum_types.pop() - # TODO: Support time-delta, date, and date-time. 
- definition["type"] = PYTHON_TO_PYDANTIC_TYPES[enum_type] - fields[field_name] = definition - else: - fields[field_name] = field_info - fields[field_name]["required"] = field_name in required - if "const" in field_info and "type" not in field_info: - fields[field_name]["type"] = PYTHON_TO_PYDANTIC_TYPES[ - type(field_info["const"]) - ] + @classmethod + def _schema_properties(cls: Type[ModelType]) -> Mapping[str, Any]: + return cls.model_schema["properties"] - return fields + @classmethod + def _update_dfn(cls, annotation: Any, schema: Dict[str, Any]) -> None: + try: + if issubclass(annotation, Model) and annotation.__name__ != cls.__name__: + schema["$defs"][annotation.__name__] = annotation.model_schema + except TypeError: + pass @classmethod def _derive_model( @@ -1239,63 +1228,6 @@ def _derive_field( return field_type, field_new -class ColumnInfo(BaseModel, arbitrary_types_allowed=True): - """patito-side model for storing column metadata - - Args: - constraints (Union[polars.Expression, List[polars.Expression]): A single - constraint or list of constraints, expressed as a polars expression objects. - All rows must satisfy the given constraint. You can refer to the given column - with ``pt.field``, which will automatically be replaced with - ``polars.col()`` before evaluation. - derived_from (Union[str, polars.Expr]): used to mark fields that are meant to be derived from other fields. Users can specify a polars expression that will be called to derive the column value when `pt.DataFrame.derive` is called. - dtype (polars.datatype.DataType): The given dataframe column must have the given - polars dtype, for instance ``polars.UInt64`` or ``pl.Float32``. - unique (bool): All row values must be unique. 
- """ - - dtype: DataTypeClass | DataType | None = None - constraints: pl.Expr | Sequence[pl.Expr] | None = None - derived_from: str | pl.Expr | None = None - unique: bool | None = None - - @field_serializer("constraints", "derived_from") - def serialize_exprs(self, exprs: str | pl.Expr | Sequence[pl.Expr] | None) -> Any: - if exprs is None: - return None - elif isinstance(exprs, str): - return exprs - elif isinstance(exprs, pl.Expr): - return self._serialize_expr(exprs) - elif isinstance(exprs, Sequence): - return [self._serialize_expr(c) for c in exprs] - else: - raise ValueError(f"Invalid type for exprs: {type(exprs)}") - - def _serialize_expr(self, expr: pl.Expr) -> Dict: - if isinstance(expr, pl.Expr): - return json.loads( - expr.meta.write_json(None) - ) # can we access the dictionary directly? - else: - raise ValueError(f"Invalid type for expr: {type(expr)}") - - @field_serializer("dtype") - def serialize_dtype(self, dtype: DataTypeClass | DataType | None) -> Any: - """ - - References - ---------- - [1] https://stackoverflow.com/questions/76572310/how-to-serialize-deserialize-polars-datatypes - """ - if dtype is None: - return None - elif isinstance(dtype, DataTypeClass) or isinstance(dtype, DataType): - return parse_composite_dtype(dtype) - else: - raise ValueError(f"Invalid type for dtype: {type(dtype)}") - - def Field( *args, dtype: DataTypeClass diff --git a/src/patito/validators.py b/src/patito/validators.py index 498f0f2..e9e598d 100644 --- a/src/patito/validators.py +++ b/src/patito/validators.py @@ -2,7 +2,7 @@ from __future__ import annotations import sys -from typing import TYPE_CHECKING, Type, Union, cast +from typing import TYPE_CHECKING, Optional, Sequence, Type, Union, cast import polars as pl from typing_extensions import get_args, get_origin @@ -14,7 +14,7 @@ MissingColumnsError, MissingValuesError, RowValueError, - SuperflousColumnsError, + SuperfluousColumnsError, ) if sys.version_info >= (3, 10): # pragma: no cover @@ -94,6 +94,9 @@ def 
_dewrap_optional(type_annotation: Type) -> Type: def _find_errors( # noqa: C901 dataframe: pl.DataFrame, schema: Type[Model], + columns: Optional[Sequence[str]] = None, + allow_missing_columns: bool = False, + allow_superfluous_columns: bool = False, ) -> list[ErrorWrapper]: """ Validate the given dataframe. @@ -101,39 +104,49 @@ def _find_errors( # noqa: C901 Args: dataframe: Polars DataFrame to be validated. schema: Patito model which specifies how the dataframe should be structured. + columns: If specified, only validate the given columns. Missing columns will + check if any specified columns are missing from the inputted dataframe, + and superfluous columns will check if any columns not specified in the + schema are present in the columns list. + allow_missing_columns: If True, missing columns will not be considered an error. + allow_superfluous_columns: If True, additional columns will not be considered an error. Returns: A list of patito.exception.ErrorWrapper instances. The specific validation error can be retrieved from the "exc" attribute on each error wrapper instance. MissingColumnsError: If there are any missing columns. - SuperflousColumnsError: If there are additional, non-specified columns. + SuperfluousColumnsError: If there are additional, non-specified columns. MissingValuesError: If there are nulls in a non-optional column. ColumnDTypeError: If any column has the wrong dtype. NotImplementedError: If validation has not been implement for the given type. 
""" errors: list[ErrorWrapper] = [] - # Check if any columns are missing - for missing_column in set(schema.columns) - set(dataframe.columns): - errors.append( - ErrorWrapper( - MissingColumnsError("Missing column"), - loc=missing_column, + schema_subset = columns or schema.columns + column_subset = columns or dataframe.columns + if not allow_missing_columns: + # Check if any columns are missing + for missing_column in set(schema_subset) - set(dataframe.columns): + errors.append( + ErrorWrapper( + MissingColumnsError("Missing column"), + loc=missing_column, + ) ) - ) - # Check if any additional columns are included - for superflous_column in set(dataframe.columns) - set(schema.columns): - errors.append( - ErrorWrapper( - SuperflousColumnsError("Superflous column"), - loc=superflous_column, + if not allow_superfluous_columns: + # Check if any additional columns are included + for superfluous_column in set(column_subset) - set(schema.columns): + errors.append( + ErrorWrapper( + SuperfluousColumnsError("Superfluous column"), + loc=superfluous_column, + ) ) - ) # Check if any non-optional columns have null values - for column in schema.non_nullable_columns.intersection(dataframe.columns): + for column in schema.non_nullable_columns.intersection(column_subset): num_missing_values = dataframe.get_column(name=column).null_count() if num_missing_values: errors.append( @@ -147,10 +160,12 @@ def _find_errors( # noqa: C901 ) for column, dtype in schema.dtypes.items(): + if column not in column_subset: + continue if not isinstance(dtype, pl.List): - continue # TODO add validation here + continue - annotation = schema.model_fields[column].annotation + annotation = schema.model_fields[column].annotation # type: ignore[unreachable] # Retrieve the annotation of the list itself, # dewrapping any potential Optional[...] 
@@ -190,13 +205,11 @@ def _find_errors( # noqa: C901 dataframe_datatypes = dict(zip(dataframe.columns, dataframe.dtypes)) for column_name, column_properties in schema._schema_properties().items(): column_info = schema.column_infos[column_name] - if column_name not in dataframe.columns: + if column_name not in dataframe.columns or column_name not in column_subset: continue polars_type = dataframe_datatypes[column_name] - if ( - polars_type not in valid_dtypes[column_name] - ): # TODO allow for `strict` validation + if polars_type not in valid_dtypes[column_name]: errors.append( ErrorWrapper( ColumnDTypeError( @@ -311,7 +324,11 @@ def _find_errors( # noqa: C901 def validate( - dataframe: Union["pd.DataFrame", pl.DataFrame], schema: Type[Model] + dataframe: Union["pd.DataFrame", pl.DataFrame], + schema: Type[Model], + columns: Optional[Sequence[str]] = None, + allow_missing_columns: bool = False, + allow_superfluous_columns: bool = False, ) -> None: """ Validate the given dataframe. @@ -319,6 +336,8 @@ def validate( Args: dataframe: Polars DataFrame to be validated. schema: Patito model which specifies how the dataframe should be structured. + allow_missing_columns: If True, missing columns will not be considered an error. + allow_superfluous_columns: If True, additional columns will not be considered an error. Raises: ValidationError: If the given dataframe does not match the given schema. 
@@ -328,6 +347,12 @@ def validate( else: polars_dataframe = cast(pl.DataFrame, dataframe) - errors = _find_errors(dataframe=polars_dataframe, schema=schema) + errors = _find_errors( + dataframe=polars_dataframe, + schema=schema, + columns=columns, + allow_missing_columns=allow_missing_columns, + allow_superfluous_columns=allow_superfluous_columns, + ) if errors: raise DataFrameValidationError(errors=errors, model=schema) diff --git a/tests/examples.py b/tests/examples.py new file mode 100644 index 0000000..2fd6790 --- /dev/null +++ b/tests/examples.py @@ -0,0 +1,54 @@ +from datetime import date, datetime, time, timedelta +from typing import List, Literal, Optional + +import patito as pt +import polars as pl +from pydantic import AwareDatetime + + +class SmallModel(pt.Model): + a: int + b: str + c: AwareDatetime = pt.Field( + dtype=pl.Datetime(time_zone="UTC") + ) # check that dtype resolver will use patito-specified dtype if passed + d: Optional[AwareDatetime] = pt.Field( + default=None, dtype=pl.Datetime(time_zone="UTC") + ) + e: int = pt.Field(dtype=pl.Int8) + + +class ManyTypes(pt.Model): + int_value: int + float_value: float + str_value: str + bool_value: bool + literal_value: Literal["a", "b"] + default_value: str = "my_default" + optional_value: Optional[int] + bounded_value: int = pt.Field(ge=10, le=20) + date_value: date + datetime_value: datetime + pt_model_value: SmallModel + + +class CompleteModel(pt.Model): + str_column: str + int_column: int + float_column: float + bool_column: bool + + date_column: date + datetime_column: datetime + aware_datetime_column: AwareDatetime = pt.Field(dtype=pl.Datetime(time_zone="UTC")) + duration_column: timedelta + time_column: time + + categorical_column: Literal["a", "b", "c"] + null_column: None = None + + pt_model_column: SmallModel + + list_int_column: List[int] + list_str_column: List[str] + list_opt_column: List[Optional[int]] diff --git a/tests/test_dtypes.py b/tests/test_dtypes.py index 839b392..7ffe71f 100644 
--- a/tests/test_dtypes.py +++ b/tests/test_dtypes.py @@ -1,91 +1,115 @@ +from __future__ import annotations + +import sys from datetime import date, datetime, time, timedelta -from typing import Dict, List, Literal, Sequence, Tuple +from typing import Dict, List, Literal, Optional, Sequence, Tuple, Union import polars as pl import pytest -from patito._pydantic.dtypes import ( +from patito._pydantic.dtypes.dtypes import ( + DtypeResolver, + validate_annotation, + validate_polars_dtype, +) +from patito._pydantic.dtypes.utils import ( BOOLEAN_DTYPES, DATE_DTYPES, + STRING_DTYPES, + TIME_DTYPES, +) +from polars.datatypes import DataTypeGroup +from polars.datatypes.constants import ( DATETIME_DTYPES, DURATION_DTYPES, FLOAT_DTYPES, INTEGER_DTYPES, - STRING_DTYPES, - TIME_DTYPES, - DataTypeGroup, - default_polars_dtype_for_annotation, - valid_polars_dtypes_for_annotation, - validate_annotation, - validate_polars_dtype, ) +from pydantic import AwareDatetime + +from tests.examples import ManyTypes -def test_valids_basic_annotations(): +def test_valids_basic_annotations() -> None: # base types - assert valid_polars_dtypes_for_annotation(str) == STRING_DTYPES - assert valid_polars_dtypes_for_annotation(int) == DataTypeGroup( + assert DtypeResolver(str).valid_polars_dtypes() == STRING_DTYPES + assert DtypeResolver(int).valid_polars_dtypes() == DataTypeGroup( INTEGER_DTYPES | FLOAT_DTYPES ) - assert valid_polars_dtypes_for_annotation(float) == FLOAT_DTYPES - assert valid_polars_dtypes_for_annotation(bool) == BOOLEAN_DTYPES + assert DtypeResolver(float).valid_polars_dtypes() == FLOAT_DTYPES + assert DtypeResolver(bool).valid_polars_dtypes() == BOOLEAN_DTYPES # temporals - assert valid_polars_dtypes_for_annotation(datetime) == DATETIME_DTYPES - assert valid_polars_dtypes_for_annotation(date) == DATE_DTYPES - assert valid_polars_dtypes_for_annotation(time) == TIME_DTYPES - assert valid_polars_dtypes_for_annotation(timedelta) == DURATION_DTYPES + assert 
DtypeResolver(datetime).valid_polars_dtypes() == DATETIME_DTYPES + assert DtypeResolver(date).valid_polars_dtypes() == DATE_DTYPES + assert DtypeResolver(time).valid_polars_dtypes() == TIME_DTYPES + assert DtypeResolver(timedelta).valid_polars_dtypes() == DURATION_DTYPES # other with pytest.raises(TypeError, match="must be strings"): - valid_polars_dtypes_for_annotation(Literal[1, 2, 3]) # pyright: ignore + DtypeResolver(Literal[1, 2, 3]).valid_polars_dtypes() # pyright: ignore with pytest.raises(TypeError, match="Mixed type enums not supported"): - valid_polars_dtypes_for_annotation(Literal[1, 2, "3"]) # pyright: ignore + DtypeResolver(Literal[1, 2, "3"]).valid_polars_dtypes() # pyright: ignore - assert valid_polars_dtypes_for_annotation(Literal["a", "b", "c"]) == { # pyright: ignore - pl.Categorical, + assert DtypeResolver(Literal["a", "b", "c"]).valid_polars_dtypes() == { # pyright: ignore + pl.Enum(["a", "b", "c"]), pl.Utf8, } # combos - assert valid_polars_dtypes_for_annotation(str | None) == STRING_DTYPES - assert valid_polars_dtypes_for_annotation(int | float) == FLOAT_DTYPES + assert DtypeResolver(Optional[str]).valid_polars_dtypes() == STRING_DTYPES + if sys.version_info[1] >= 10: + assert ( + DtypeResolver(str | None | None).valid_polars_dtypes() == STRING_DTYPES + ) # superfluous None is ok + assert DtypeResolver(Union[int, float]).valid_polars_dtypes() == FLOAT_DTYPES assert ( - valid_polars_dtypes_for_annotation(str | int) == frozenset() - ) # incompatible, TODO raise patito error with strict validation on + DtypeResolver(Union[str, int]).valid_polars_dtypes() == frozenset() + ) # incompatible # invalids - assert valid_polars_dtypes_for_annotation(object) == frozenset() + assert DtypeResolver(object).valid_polars_dtypes() == frozenset() -def test_valids_nested_annotations(): - assert len(valid_polars_dtypes_for_annotation(List)) == 0 # needs inner annotation +def test_valids_nested_annotations() -> None: + assert 
len(DtypeResolver(List).valid_polars_dtypes()) == 0 # needs inner annotation assert ( - valid_polars_dtypes_for_annotation(Tuple) - == valid_polars_dtypes_for_annotation(List) - == valid_polars_dtypes_for_annotation(Sequence) + DtypeResolver(Tuple).valid_polars_dtypes() + == DtypeResolver(List).valid_polars_dtypes() + == DtypeResolver(Sequence).valid_polars_dtypes() ) # for now, these are the same - assert valid_polars_dtypes_for_annotation(List[str]) == {pl.List(pl.Utf8)} - assert valid_polars_dtypes_for_annotation(List[str] | None) == {pl.List(pl.Utf8)} - assert len(valid_polars_dtypes_for_annotation(List[int])) == len( + assert DtypeResolver(List[str]).valid_polars_dtypes() == {pl.List(pl.Utf8)} + assert DtypeResolver(Optional[List[str]]).valid_polars_dtypes() == { + pl.List(pl.Utf8) + } + assert len(DtypeResolver(List[int]).valid_polars_dtypes()) == len( DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) ) - assert len(valid_polars_dtypes_for_annotation(List[int | float])) == len( + assert len(DtypeResolver(List[Union[int, float]]).valid_polars_dtypes()) == len( FLOAT_DTYPES ) - assert len(valid_polars_dtypes_for_annotation(List[int | None])) == len( + assert len(DtypeResolver(List[Optional[int]]).valid_polars_dtypes()) == len( DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) ) - assert valid_polars_dtypes_for_annotation(List[List[str]]) == { + assert DtypeResolver(List[List[str]]).valid_polars_dtypes() == { pl.List(pl.List(pl.Utf8)) } # recursion works as expected assert ( - valid_polars_dtypes_for_annotation(Dict) == frozenset() + DtypeResolver(Dict).valid_polars_dtypes() == frozenset() ) # not currently supported + # support for nested models via struct + assert ( + len(DtypeResolver(ManyTypes).valid_polars_dtypes()) == 1 + ) # only defaults are valid + assert ( + DtypeResolver(ManyTypes).valid_polars_dtypes() + == DtypeResolver(Optional[ManyTypes]).valid_polars_dtypes() + ) + -def test_dtype_validation(): +def test_dtype_validation() -> None: 
validate_polars_dtype(int, pl.Int16) # no issue validate_polars_dtype(int, pl.Float64) # no issue with pytest.raises(ValueError, match="Invalid dtype"): @@ -94,61 +118,78 @@ def test_dtype_validation(): with pytest.raises(ValueError, match="Invalid dtype"): validate_polars_dtype(List[str], pl.List(pl.Float64)) + # some potential corner cases + validate_polars_dtype(AwareDatetime, dtype=pl.Datetime(time_zone="UTC")) -def test_defaults_basic_annotations(): + +def test_defaults_basic_annotations() -> None: # base types - assert default_polars_dtype_for_annotation(str) == pl.Utf8 - assert default_polars_dtype_for_annotation(int) == pl.Int64 - assert default_polars_dtype_for_annotation(float) == pl.Float64 - assert default_polars_dtype_for_annotation(bool) == pl.Boolean + assert DtypeResolver(str).default_polars_dtype() == pl.Utf8 + assert DtypeResolver(int).default_polars_dtype() == pl.Int64 + assert DtypeResolver(float).default_polars_dtype() == pl.Float64 + assert DtypeResolver(bool).default_polars_dtype() == pl.Boolean # temporals - assert default_polars_dtype_for_annotation(datetime) == pl.Datetime - assert default_polars_dtype_for_annotation(date) == pl.Date - assert default_polars_dtype_for_annotation(time) == pl.Time - assert default_polars_dtype_for_annotation(timedelta) == pl.Duration + assert DtypeResolver(datetime).default_polars_dtype() == pl.Datetime + assert DtypeResolver(date).default_polars_dtype() == pl.Date + assert DtypeResolver(time).default_polars_dtype() == pl.Time + assert DtypeResolver(timedelta).default_polars_dtype() == pl.Duration # combos - assert default_polars_dtype_for_annotation(str | None) == pl.Utf8 - assert default_polars_dtype_for_annotation(int | float) == None - assert default_polars_dtype_for_annotation(str | int) == None + assert DtypeResolver(Optional[str]).default_polars_dtype() == pl.Utf8 + assert DtypeResolver(Union[int, float]).default_polars_dtype() is None + assert DtypeResolver(Union[str, int]).default_polars_dtype() is 
None + + # other + literal = DtypeResolver(Literal["a", "b", "c"]).default_polars_dtype() + assert literal == pl.Enum(["a", "b", "c"]) + assert set(literal.categories) == {"a", "b", "c"} # invalids - assert default_polars_dtype_for_annotation(object) == None + assert DtypeResolver(object).default_polars_dtype() is None -def test_defaults_nested_annotations(): - assert default_polars_dtype_for_annotation(List) == None # needs inner annotation +def test_defaults_nested_annotations() -> None: + assert DtypeResolver(List).default_polars_dtype() is None # needs inner annotation - assert default_polars_dtype_for_annotation(List[str]) == pl.List(pl.Utf8) - assert default_polars_dtype_for_annotation(List[str] | None) == pl.List(pl.Utf8) - assert default_polars_dtype_for_annotation(List[int]) == pl.List(pl.Int64) - assert default_polars_dtype_for_annotation(List[int | None]) == pl.List(pl.Int64) - assert default_polars_dtype_for_annotation(List[int | float]) == None - assert default_polars_dtype_for_annotation(List[str | int]) == None - assert default_polars_dtype_for_annotation(List[List[str]]) == pl.List( + assert DtypeResolver(List[str]).default_polars_dtype() == pl.List(pl.Utf8) + assert DtypeResolver(Optional[List[str]]).default_polars_dtype() == pl.List(pl.Utf8) + assert DtypeResolver(List[int]).default_polars_dtype() == pl.List(pl.Int64) + assert DtypeResolver(List[Optional[int]]).default_polars_dtype() == pl.List( + pl.Int64 + ) + assert DtypeResolver(List[Union[int, float]]).default_polars_dtype() is None + assert DtypeResolver(List[Union[str, int]]).default_polars_dtype() is None + assert DtypeResolver(List[List[str]]).default_polars_dtype() == pl.List( pl.List(pl.Utf8) ) # recursion works as expected - assert default_polars_dtype_for_annotation(List[List[str | None]]) == pl.List( + assert DtypeResolver(List[List[Optional[str]]]).default_polars_dtype() == pl.List( pl.List(pl.Utf8) ) with pytest.raises( - ValueError, match="pydantic object types not currently 
supported" + NotImplementedError, match="dictionaries not currently supported" ): - default_polars_dtype_for_annotation(Dict) + DtypeResolver(Dict).default_polars_dtype() + + # support for nested models via struct + many_types = DtypeResolver(ManyTypes).default_polars_dtype() + assert many_types == pl.Struct + assert len(many_types.fields) == len(ManyTypes.columns) + assert DtypeResolver(Optional[ManyTypes]).default_polars_dtype() == many_types -def test_annotation_validation(): +def test_annotation_validation() -> None: validate_annotation(int) # no issue - validate_annotation(int | None) + validate_annotation(Optional[int]) + with pytest.raises(ValueError, match="Valid dtypes are:"): - validate_annotation(int | float) + validate_annotation(Union[int, float]) with pytest.raises(ValueError, match="not compatible with any polars dtypes"): - validate_annotation(str | int) + validate_annotation(Union[str, int]) - validate_annotation(List[int | None]) + validate_annotation(List[Optional[int]]) with pytest.raises(ValueError, match="not compatible with any polars dtypes"): - validate_annotation(List[str | int]) + validate_annotation(List[Union[str, int]]) with pytest.raises(ValueError, match="Valid dtypes are:"): - validate_annotation(List[int | float]) + validate_annotation(List[Union[int, float]]) diff --git a/tests/test_dummy_data.py b/tests/test_dummy_data.py index 3071edb..1cb8f5d 100644 --- a/tests/test_dummy_data.py +++ b/tests/test_dummy_data.py @@ -96,7 +96,7 @@ class UniqueModel(pt.Model): assert example_df[column].is_duplicated().sum() == 0 -def test_enum_field_example_values(): +def test_enum_field_example_values() -> None: """It should produce correct example values for enums.""" class DefaultEnumModel(pt.Model): @@ -111,16 +111,22 @@ class DefaultEnumModel(pt.Model): # Workaround for pola-rs/polars#4253 example_df = DefaultEnumModel.examples({"row_number": [1]}).with_columns( - pl.col("none_default_optional_enum_field").cast(pl.Categorical) + 
pl.col("none_default_optional_enum_field").cast(pl.Enum(["a", "b", "c"])) ) correct_example_df = pl.DataFrame( [ pl.Series("row_number", [1], dtype=pl.Int64), - pl.Series("enum_field", ["a"], dtype=pl.Categorical), - pl.Series("default_enum_field", ["b"], dtype=pl.Categorical), - pl.Series("default_optional_enum_field", ["c"], dtype=pl.Categorical), - pl.Series("none_default_optional_enum_field", [None], dtype=pl.Categorical), + pl.Series("enum_field", ["a"], dtype=pl.Enum(["a", "b", "c"])), + pl.Series("default_enum_field", ["b"], dtype=pl.Enum(["a", "b", "c"])), + pl.Series( + "default_optional_enum_field", ["c"], dtype=pl.Enum(["a", "b", "c"]) + ), + pl.Series( + "none_default_optional_enum_field", + [None], + dtype=pl.Enum(["a", "b", "c"]), + ), ] ) diff --git a/tests/test_model.py b/tests/test_model.py index 22e7ead..861e8b8 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -2,22 +2,27 @@ # pyright: reportPrivateImportUsage=false import enum import re -from datetime import date, datetime, timedelta +from datetime import date, datetime from typing import Literal, Optional, Type import patito as pt import polars as pl import pytest -from patito._pydantic.dtypes import ( +from patito._pydantic.dtypes.utils import ( DATE_DTYPES, + TIME_DTYPES, +) +from polars.datatypes import DataTypeGroup +from polars.datatypes.constants import ( DATETIME_DTYPES, DURATION_DTYPES, FLOAT_DTYPES, INTEGER_DTYPES, - DataTypeGroup, ) from pydantic import ValidationError +from tests.examples import CompleteModel + def test_model_example(): """Test for Model.example().""" @@ -179,29 +184,30 @@ class CustomModel(pt.Model): def test_mapping_to_polars_dtypes(): """Model fields should be mappable to polars dtypes.""" - class CompleteModel(pt.Model): - str_column: str - int_column: int - float_column: float - bool_column: bool - - date_column: date - datetime_column: datetime - duration_column: timedelta - - categorical_column: Literal["a", "b", "c"] - null_column: None - assert 
CompleteModel.dtypes == { - "str_column": pl.Utf8, - "int_column": pl.Int64, - "float_column": pl.Float64, - "bool_column": pl.Boolean, - "date_column": pl.Date, - "datetime_column": pl.Datetime, - "duration_column": pl.Duration, - "categorical_column": pl.Categorical, - "null_column": pl.Null, + "str_column": pl.Utf8(), + "int_column": pl.Int64(), + "float_column": pl.Float64(), + "bool_column": pl.Boolean(), + "date_column": pl.Date(), + "datetime_column": pl.Datetime(), + "aware_datetime_column": pl.Datetime(time_zone="UTC"), + "duration_column": pl.Duration(), + "time_column": pl.Time(), + "categorical_column": pl.Enum(["a", "b", "c"]), + "null_column": pl.Null(), + "pt_model_column": pl.Struct( + [ + pl.Field("a", pl.Int64), + pl.Field("b", pl.Utf8), + pl.Field("c", pl.Datetime(time_zone="UTC")), + pl.Field("d", pl.Datetime(time_zone="UTC")), + pl.Field("e", pl.Int8), + ] + ), + "list_int_column": pl.List(pl.Int64), + "list_str_column": pl.List(pl.Utf8), + "list_opt_column": pl.List(pl.Int64), } assert CompleteModel.valid_dtypes == { @@ -211,11 +217,36 @@ class CompleteModel(pt.Model): "bool_column": {pl.Boolean}, "date_column": DATE_DTYPES, "datetime_column": DATETIME_DTYPES, + "aware_datetime_column": {pl.Datetime(time_zone="UTC")}, "duration_column": DURATION_DTYPES, - "categorical_column": {pl.Categorical, pl.Utf8}, + "time_column": TIME_DTYPES, + "categorical_column": {pl.Enum(["a", "b", "c"]), pl.Utf8}, "null_column": {pl.Null}, + "pt_model_column": DataTypeGroup( + [ + pl.Struct( + [ + pl.Field("a", pl.Int64), + pl.Field("b", pl.Utf8), + pl.Field("c", pl.Datetime(time_zone="UTC")), + pl.Field("d", pl.Datetime(time_zone="UTC")), + pl.Field("e", pl.Int8), + ] + ) + ] + ), + "list_int_column": DataTypeGroup( + [pl.List(x) for x in DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES)] + ), + "list_str_column": DataTypeGroup([pl.List(pl.Utf8)]), + "list_opt_column": DataTypeGroup( + [pl.List(x) for x in DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES)] + ), } + 
CompleteModel.example(int_column=2) + CompleteModel.validate(CompleteModel.examples({"int_column": [1, 2, 3]})) + def test_model_joins(): """It should produce models compatible with join statements.""" @@ -355,7 +386,7 @@ class ABCEnum(enum.Enum): class EnumModel(pt.Model): column: ABCEnum - assert EnumModel.dtypes["column"] == pl.Categorical + assert EnumModel.dtypes["column"] == pl.Enum(["a", "b", "c"]) assert EnumModel.example_value(field="column") == "a" assert EnumModel.example() == EnumModel(column="a") @@ -370,10 +401,7 @@ class MultiTypedEnum(enum.Enum): class InvalidEnumModel(pt.Model): column: MultiTypedEnum - if pt._DUCKDB_AVAILABLE: # pragma: no cover - assert EnumModel.sql_types["column"].startswith("enum__") - with pytest.raises(TypeError, match=r".*Encountered types: \['int', 'str'\]\."): - InvalidEnumModel.sql_types # pyright: ignore + InvalidEnumModel.validate_schema() def test_column_infos(): @@ -416,21 +444,27 @@ class Test2(pt.Model): def test_conflicting_type_dtype(): - with pytest.raises(ValueError, match="Invalid dtype Utf8") as e: + with pytest.raises(ValueError, match="Invalid dtype String") as e: class Test1(pt.Model): foo: int = pt.Field(dtype=pl.Utf8) + Test1.validate_schema() + with pytest.raises(ValueError, match="Invalid dtype Float32") as e: class Test2(pt.Model): foo: str = pt.Field(dtype=pl.Float32) + Test2.validate_schema() + with pytest.raises(ValueError, match="Invalid dtype UInt32") as e: class Test3(pt.Model): foo: str | None = pt.Field(dtype=pl.UInt32) + Test3.validate_schema() + def test_polars_python_type_harmonization(): class Test(pt.Model): diff --git a/tests/test_validators.py b/tests/test_validators.py index 7d6af23..e8cceaa 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -73,7 +73,7 @@ class SingleColumnModel(pt.Model): ] -def test_superflous_column_validation(): +def test_superfluous_column_validation(): """Validation should catch superflous columns.""" class SingleColumnModel(pt.Model): @@ 
-95,8 +95,8 @@ class SingleColumnModel(pt.Model): assert len(e_info.value.errors()) == 1 assert errors[0] == { "loc": ("column_2",), - "msg": "Superflous column", - "type": "type_error.superflouscolumns", + "msg": "Superfluous column", + "type": "type_error.superfluouscolumns", } @@ -206,12 +206,16 @@ class CompleteModel(pt.Model): class NonCompatibleModel(pt.Model): # TODO catch value error my_field: object + NonCompatibleModel.validate_schema() + # The same goes for list-annotated fields with pytest.raises(ValueError, match="not compatible with any polars dtypes"): class NonCompatibleListModel(pt.Model): my_field: List[object] + NonCompatibleListModel.validate_schema() + # It should also work with pandas data frames class PandasCompatibleModel(CompleteModel): date_column: str # type: ignore From dbad4cffe1a0a09b073768a948c8bfb4eac16490 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Thu, 8 Feb 2024 12:46:02 -0500 Subject: [PATCH 24/29] chore: add tests, some new features - support validation for column subsets - tests for nested models (as structs) - fill_null adds missing columns with default values - test recursive derivation - test derive column subset - allow conversion pt.DataFrame -> pl.DataFrame - support pydantic validation_alias chore: fixes for python 3.9 (all tests passing) chore(patito): cleanup --- src/patito/_pydantic/column_info.py | 2 + src/patito/_pydantic/dtypes/dtypes.py | 4 +- src/patito/polars.py | 95 ++++++++-- src/patito/pydantic.py | 72 +++++--- tests/test_dummy_data.py | 74 +++++--- tests/test_model.py | 178 ++++++++++++------- tests/test_polars.py | 243 ++++++++++++++++++++++++-- tests/test_validators.py | 107 ++++++++---- 8 files changed, 613 insertions(+), 162 deletions(-) diff --git a/src/patito/_pydantic/column_info.py b/src/patito/_pydantic/column_info.py index 0e80acf..582b764 100644 --- a/src/patito/_pydantic/column_info.py +++ b/src/patito/_pydantic/column_info.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import 
json from typing import ( Any, diff --git a/src/patito/_pydantic/dtypes/dtypes.py b/src/patito/_pydantic/dtypes/dtypes.py index 4a99676..3f343f1 100644 --- a/src/patito/_pydantic/dtypes/dtypes.py +++ b/src/patito/_pydantic/dtypes/dtypes.py @@ -23,7 +23,9 @@ @cache -def valid_dtypes_for_model(cls: Type[ModelType]) -> Mapping[str, FrozenSet[DataTypeClass]]: +def valid_dtypes_for_model( + cls: Type[ModelType], +) -> Mapping[str, FrozenSet[DataTypeClass]]: return { column: DtypeResolver(cls.model_fields[column].annotation).valid_polars_dtypes() if cls.column_infos[column].dtype is None diff --git a/src/patito/polars.py b/src/patito/polars.py index f7754ab..8b4c0fc 100644 --- a/src/patito/polars.py +++ b/src/patito/polars.py @@ -5,26 +5,25 @@ TYPE_CHECKING, Any, Collection, + Dict, Generic, Iterable, Optional, + Sequence, + Tuple, Type, TypeVar, Union, cast, - Dict, - Sequence, - Mapping, - Tuple ) import polars as pl from polars.type_aliases import IntoExpr -from pydantic import create_model +from pydantic import AliasChoices, AliasPath, create_model from typing_extensions import Literal -from patito.exceptions import MultipleRowsReturned, RowDoesNotExist from patito._pydantic.column_info import ColumnInfo +from patito.exceptions import MultipleRowsReturned, RowDoesNotExist if TYPE_CHECKING: import numpy as np @@ -138,6 +137,54 @@ def _derive_column( derived_columns.append(column_name) return df, derived_columns + def unalias(self: LDF) -> LDF: + if not any(fi.validation_alias for fi in self.model.model_fields.values()): + return self + exprs = [] + + def to_expr(va: str | AliasPath | AliasChoices) -> Optional[pl.Expr]: + if isinstance(va, str): + return pl.col(va) if va in self.columns else None + elif isinstance(va, AliasPath): + if len(va.path) != 2 or not isinstance(va.path[1], int): + raise NotImplementedError( + f"TODO figure out how this AliasPath behaves ({va})" + ) + return ( + pl.col(va.path[0]).list.get(va.path[1]) + if va.path[0] in self.columns + else None 
+ ) + elif isinstance(va, AliasChoices): + local_expr: Optional[pl.Expr] = None + for choice in va.choices: + if (part := to_expr(choice)) is not None: + local_expr = ( + local_expr.fill_null(value=part) + if local_expr is not None + else part + ) + return local_expr + else: + raise NotImplementedError( + f"unknown validation_alias type {field_info.validation_alias}" + ) + + for name, field_info in self.model.model_fields.items(): + if field_info.validation_alias is None: + exprs.append(pl.col(name)) + else: + expr = to_expr(field_info.validation_alias) + if name in self.columns: + if expr is None: + exprs.append(pl.col(name)) + else: + exprs.append(pl.col(name).fill_null(value=expr)) + elif expr is not None: + exprs.append(expr.alias(name)) + + return self.select(exprs) + def cast( self: LDF, strict: bool = False, columns: Optional[Sequence[str]] = None ) -> LDF: @@ -298,7 +345,23 @@ def set_model(self, model): # type: ignore[no-untyped-def] # noqa: ANN001, ANN2 cls._from_pydf(self._df), ) - def cast(self: DF, strict: bool = False, columns: Optional[Sequence[str]] = None) -> DF: + def unalias(self: DF) -> DF: + """ + Un-aliases column names using information from pydantic validation_alias. + + In order of preference - model field name then validation_aliases in order of occurrence + + limitation - AliasChoice validation type only supports selecting a single element of an array + + Returns: + DataFrame[Model]: A dataframe with columns normalized to model names. + + """ + return self.lazy().unalias().collect() + + def cast( + self: DF, strict: bool = False, columns: Optional[Sequence[str]] = None + ) -> DF: """ Cast columns to `dtypes` specified by the associated Patito model. @@ -380,7 +443,9 @@ def drop( else: return self.drop(list(set(self.columns) - set(self.model.columns))) - def validate(self: DF) -> DF: + def validate( + self: DF, columns: Optional[Sequence[str]] = None, **kwargs: Any + ) -> DF: """ Validate the schema and content of the dataframe. 
@@ -432,7 +497,7 @@ def validate(self: DF) -> DF: f"You must invoke {self.__class__.__name__}.set_model() " f"before invoking {self.__class__.__name__}.validate()." ) - self.model.validate(dataframe=self) + self.model.validate(dataframe=self, columns=columns, **kwargs) return self def derive(self: DF, columns: list[str] | None = None) -> DF: @@ -535,7 +600,14 @@ def fill_null( ) return self.with_columns( [ - pl.col(column).fill_null(pl.lit(default_value)) + pl.col(column).fill_null( + pl.lit(default_value, self.model.dtypes[column]) + ) + if column in self.columns + else pl.Series( + column, [default_value], self.model.dtypes[column] + ) # NOTE: hack to get around polars bug https://github.com/pola-rs/polars/issues/13602 + # else pl.lit(default_value, self.model.dtypes[column]).alias(column) for column, default_value in self.model.defaults.items() ] ).set_model(self.model) @@ -642,6 +714,9 @@ def _pydantic_model(self) -> Type[Model]: ), ) + def as_polars(self) -> pl.DataFrame: + return pl.DataFrame._from_pydf(self._df) + @classmethod def read_csv( # type: ignore[no-untyped-def] cls: Type[DF], diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index f6093cb..b136833 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -4,6 +4,8 @@ import itertools from collections.abc import Iterable from datetime import date, datetime, time, timedelta +from functools import partial +from inspect import getfullargspec from typing import ( TYPE_CHECKING, Any, @@ -120,7 +122,7 @@ def model_schema(cls: Type[ModelType]) -> Mapping[str, Mapping[str, Any]]: return schema_for_model(cls) @property - def columns(cls: Type[ModelType]) -> List[str]: + def columns(cls: Type[ModelType]) -> List[str]: # type: ignore """ Return the name of the dataframe columns specified by the fields of the model. 
@@ -136,7 +138,7 @@ def columns(cls: Type[ModelType]) -> List[str]: >>> Product.columns ['name', 'price'] """ - return list(cls.model_json_schema()["properties"].keys()) + return list(cls.model_fields.keys()) @property def dtypes( # type: ignore @@ -219,8 +221,9 @@ def defaults( # type: ignore {'price': 0, 'temperature_zone': 'dry'} """ return { - field_name: cls.model_fields[field_name].default - for field_name in cls.columns + field_name: props["default"] + for field_name, props in cls._schema_properties().items() + if "default" in props } @property @@ -441,12 +444,16 @@ def _from_polars( def validate( cls, dataframe: Union["pd.DataFrame", pl.DataFrame], + columns: Optional[Sequence[str]] = None, + **kwargs, ) -> None: """ Validate the schema and content of the given dataframe. Args: dataframe: Polars DataFrame to be validated. + columns: Optional list of columns to validate. If not provided, all columns + of the dataframe will be validated. Raises: patito.exceptions.ValidationError: If the given dataframe does not match @@ -482,7 +489,7 @@ def validate( temperature_zone Rows with invalid values: {'oven'}. 
(type=value_error.rowvalue) """ - validate(dataframe=dataframe, schema=cls) + validate(dataframe=dataframe, columns=columns, schema=cls, **kwargs) @classmethod def example_value( # noqa: C901 @@ -861,7 +868,7 @@ def examples( else: series.append(pl.lit(value, dtype=dtype).alias(column_name)) - return DataFrame().with_columns(series).with_columns(unique_series) + return cls.DataFrame().with_columns(series).with_columns(unique_series) @classmethod def join( @@ -1228,16 +1235,32 @@ def _derive_field( return field_type, field_new -def Field( - *args, - dtype: DataTypeClass - | DataType - | None = None, # TODO figure out how to make nice signature - constraints: pl.Expr | Sequence[pl.Expr] | None = None, - derived_from: str | pl.Expr | None = None, - unique: bool | None = None, - **kwargs, -) -> Any: +FIELD_KWARGS = getfullargspec(fields.Field) + + +def FieldCI( + column_info: CI, *args: Any, **kwargs: Any +) -> Any: # annotate with Any to make the downstream type annotations happy + ci = column_info(**kwargs) + for field in ci.model_fields_set: + kwargs.pop(field) + if kwargs.pop("modern_kwargs_only", True): + for kwarg in kwargs: + if kwarg not in FIELD_KWARGS.kwonlyargs and kwarg not in FIELD_KWARGS.args: + raise ValueError( + f"unexpected kwarg {kwarg}={kwargs[kwarg]}. Add modern_kwargs_only=False to ignore" + ) + return fields.Field( + *args, + json_schema_extra={"column_info": ci}, + **kwargs, + ) + + +Field = partial(FieldCI, column_info=ColumnInfo) + + +class FieldDoc: """ Annotate model field with additional type and validation information. @@ -1252,9 +1275,10 @@ def Field( All rows must satisfy the given constraint. You can refer to the given column with ``pt.field``, which will automatically be replaced with ``polars.col()`` before evaluation. - unique (bool): All row values must be unique. + derived_from (Union[str, polars.Expr]): used to mark fields that are meant to be derived from other fields. 
Users can specify a polars expression that will be called to derive the column value when `pt.DataFrame.derive` is called. dtype (polars.datatype.DataType): The given dataframe column must have the given polars dtype, for instance ``polars.UInt64`` or ``pl.Float32``. + unique (bool): All row values must be unique. gt: All values must be greater than ``gt``. ge: All values must be greater than or equal to ``ge``. lt: All values must be less than ``lt``. @@ -1306,14 +1330,6 @@ def Field( brand_color 2 rows with out of bound values. (type=value_error.rowvalue) """ - column_info = ColumnInfo( - dtype=dtype, constraints=constraints, derived_from=derived_from, unique=unique - ) - field_info = fields.Field( - *args, - json_schema_extra={ - "column_info": column_info # pyright: ignore TODO pydantic expects JsonDict here, how to signal this is serializable? - }, - **kwargs, - ) - return field_info + + +Field.__doc__ = FieldDoc.__doc__ diff --git a/tests/test_dummy_data.py b/tests/test_dummy_data.py index 1cb8f5d..e2f9edc 100644 --- a/tests/test_dummy_data.py +++ b/tests/test_dummy_data.py @@ -1,13 +1,13 @@ """Test of functionality related to the generation of dummy data.""" from datetime import date, datetime -from typing import List, Literal, Optional +from typing import List, Literal, Optional, Sequence import patito as pt import polars as pl import pytest -def test_model_example_df(): +def test_model_example_df() -> None: """Test for patito.Model.example().""" # When inheriting from Model you get a .examples() method for generating dataframes @@ -45,17 +45,24 @@ class MyRow(pt.Model): MyRow.examples({"a": [0], "f": [1], "g": [2]}) -def test_examples(): +def test_examples() -> None: class MyModel(pt.Model): a: int b: Optional[str] c: Optional[int] d: Optional[List[str]] = pt.Field(dtype=pl.List(pl.Utf8)) + e: List[int] df = MyModel.examples({"a": [1, 2]}) assert isinstance(df, pl.DataFrame) - assert df.dtypes == [pl.Int64, pl.Utf8, pl.Int64, pl.List] - assert df.columns 
== ["a", "b", "c", "d"] + assert df.dtypes == [ + pl.Int64, + pl.Utf8, + pl.Int64, + pl.List(pl.Utf8), + pl.List(pl.Int64), + ] + assert df.columns == ["a", "b", "c", "d", "e"] # A TypeError should be raised when you provide no column names with pytest.raises( @@ -65,22 +72,7 @@ class MyModel(pt.Model): MyModel.examples([[1, 2]]) -@pytest.mark.skipif("Database" not in dir(pt), reason="Requires DuckDB") -def test_creation_of_empty_relation(): - """You should be able to create a zero-row relation with correct types.""" - - class MyModel(pt.Model): - a: int - b: Optional[str] - - db = pt.duckdb.Database() - empty_relation = db.empty_relation(schema=MyModel) - assert empty_relation.columns == ["a", "b"] - assert empty_relation.types == {"a": "BIGINT", "b": "VARCHAR"} - assert empty_relation.count() == 0 - - -def test_generation_of_unique_data(): +def test_generation_of_unique_data() -> None: """Example data generators should be able to generate unique data.""" class UniqueModel(pt.Model): @@ -141,3 +133,43 @@ class DefaultEnumModel(pt.Model): assert example_model.default_enum_field == "b" assert example_model.default_optional_enum_field == "c" assert example_model.none_default_optional_enum_field is None + + +def test_nested_models() -> None: + """It should be possible to create nested models.""" + + class NestedModel(pt.Model): + nested_field: int + + class ParentModel1(pt.Model): + parent_field: int + nested_model: NestedModel + + example_model = ParentModel1.example() + example_df = ParentModel1.examples() + assert isinstance(example_model.nested_model, NestedModel) + assert example_model.nested_model.nested_field is not None + + # inheritance also works + class ParentModel2(NestedModel): + parent_field: int + + example_model = ParentModel2.example() + assert example_model.nested_field is not None + assert example_model.parent_field is not None + + # and optional nested models are ok + class ParentModel3(pt.Model): + parent_field: int + nested_model: 
Optional[NestedModel] = None + + example_model = ParentModel3.example() + assert example_model.nested_model is None + + # sequences of nested models also work + class ParentModel(pt.Model): + parent_field: int + nested_models: Sequence[NestedModel] + + example_model = ParentModel.example() + example_df = ParentModel.examples() diff --git a/tests/test_model.py b/tests/test_model.py index 861e8b8..792c2de 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -1,9 +1,11 @@ """Tests for patito.Model.""" +from __future__ import annotations + # pyright: reportPrivateImportUsage=false import enum import re -from datetime import date, datetime -from typing import Literal, Optional, Type +from datetime import date, datetime, time +from typing import Optional, Type import patito as pt import polars as pl @@ -19,29 +21,19 @@ FLOAT_DTYPES, INTEGER_DTYPES, ) -from pydantic import ValidationError +from pydantic import AliasChoices, AwareDatetime, ValidationError -from tests.examples import CompleteModel +from tests.examples import CompleteModel, ManyTypes, SmallModel -def test_model_example(): +def test_model_example() -> None: """Test for Model.example().""" # When inheriting from Model you get a .dummy() method for generating rows with # default values according to the type annotation. 
- class MyModel(pt.Model): - int_value: int - float_value: float - str_value: str - bool_value: bool - literal_value: Literal["a", "b"] - default_value: str = "my_default" - optional_value: Optional[int] - bounded_value: int = pt.Field(ge=10, le=20) - date_value: date - datetime_value: datetime - - assert MyModel.example().model_dump() == { + SmallModel.example().model_dump() + + assert ManyTypes.example().model_dump() == { "int_value": -1, "float_value": -0.5, "str_value": "dummy_string", @@ -52,8 +44,9 @@ class MyModel(pt.Model): "bounded_value": 15, "date_value": date(year=1970, month=1, day=1), "datetime_value": datetime(year=1970, month=1, day=1), + "pt_model_value": SmallModel.example().model_dump(), } - assert MyModel.example( + assert ManyTypes.example( bool_value=True, default_value="override", optional_value=1, @@ -68,8 +61,11 @@ class MyModel(pt.Model): "bounded_value": 15, "date_value": date(year=1970, month=1, day=1), "datetime_value": datetime(year=1970, month=1, day=1), + "pt_model_value": SmallModel.example().model_dump(), } + ManyTypes.validate(ManyTypes.examples({"int_value": range(200)})) + # For now, valid regex data is not implemented class RegexModel(pt.Model): regex_column: str = pt.Field(pattern=r"[0-9a-f]") @@ -81,7 +77,7 @@ class RegexModel(pt.Model): RegexModel.example() -def test_model_pandas_examples(): +def test_model_pandas_examples() -> None: """Test for Row.dummy_pandas().""" pd = pytest.importorskip("pandas") @@ -127,7 +123,7 @@ class MyRow(pt.Model): MyRow.pandas_examples([[1, 2, 3, 4]]) -def test_instantiating_model_from_row(): +def test_instantiating_model_from_row() -> None: """You should be able to instantiate models from rows.""" class Model(pt.Model): @@ -149,7 +145,7 @@ class Model(pt.Model): Model._from_polars(None) # pyright: ignore -def test_insstantiation_from_pandas_row(): +def test_insstantiation_from_pandas_row() -> None: """You should be able to instantiate models from pandas rows.""" pytest.importorskip("pandas") 
@@ -164,7 +160,7 @@ class Model(pt.Model): assert Model.from_row(pandas_dataframe.loc[0]).a == 1 # type: ignore -def test_model_dataframe_class_creation(): +def test_model_dataframe_class_creation() -> None: """Each model should get a custom DataFrame class.""" class CustomModel(pt.Model): @@ -181,7 +177,7 @@ class CustomModel(pt.Model): assert CustomModel.LazyFrame.model is CustomModel -def test_mapping_to_polars_dtypes(): +def test_mapping_to_polars_dtypes() -> None: """Model fields should be mappable to polars dtypes.""" assert CompleteModel.dtypes == { @@ -248,7 +244,7 @@ def test_mapping_to_polars_dtypes(): CompleteModel.validate(CompleteModel.examples({"int_column": [1, 2, 3]})) -def test_model_joins(): +def test_model_joins() -> None: """It should produce models compatible with join statements.""" class Left(pt.Model): @@ -292,7 +288,7 @@ def test_model_validator(model: Type[pt.Model]) -> None: assert Left.join(Right, how="anti") is Left -def test_model_selects(): +def test_model_selects() -> None: """It should produce models compatible with select statements.""" class MyModel(pt.Model): @@ -318,7 +314,7 @@ class MyModel(pt.Model): MyModel.select("c") -def test_model_prefix_and_suffix(): +def test_model_prefix_and_suffix() -> None: """It should produce models where all fields have been prefixed/suffixed.""" class MyModel(pt.Model): @@ -330,7 +326,7 @@ class MyModel(pt.Model): assert NewModel.nullable_columns == {"pre_a_post"} -def test_model_field_renaming(): +def test_model_field_renaming() -> None: """It should be able to change its field names.""" class MyModel(pt.Model): @@ -347,7 +343,7 @@ class MyModel(pt.Model): MyModel.rename({"c": "C"}) -def test_model_field_dropping(): +def test_model_field_dropping() -> None: """It should be able to drop a subset of its fields""" class MyModel(pt.Model): @@ -359,7 +355,7 @@ class MyModel(pt.Model): assert MyModel.drop(["b", "c"]).columns == ["a"] -def test_with_fields(): +def test_with_fields() -> None: """It 
should allow whe user to add additional fields.""" class MyModel(pt.Model): @@ -375,7 +371,7 @@ class MyModel(pt.Model): assert ExpandedModel.nullable_columns == set("ce") -def test_enum_annotated_field(): +def test_enum_annotated_field() -> None: """It should use values of enums to infer types.""" class ABCEnum(enum.Enum): @@ -404,47 +400,59 @@ class InvalidEnumModel(pt.Model): InvalidEnumModel.validate_schema() -def test_column_infos(): +def test_model_schema() -> None: class Model(pt.Model): - a: int - b: int = pt.Field(constraints=[(pl.col("b") < 10)]) - c: int = pt.Field(derived_from=pl.col("a") + pl.col("b")) - d: int = pt.Field(dtype=pl.UInt8) - e: int = pt.Field(unique=True) + a: int = pt.Field(ge=0, unique=True) - schema = Model.model_json_schema() # no serialization issues - props = schema[ - "properties" - ] # extra fields are stored in modified schema_properties - for col in ["b", "c", "d", "e"]: - assert "column_info" in props[col] - assert props["b"]["column_info"]["constraints"] is not None - assert props["c"]["column_info"]["derived_from"] is not None - assert props["d"]["column_info"]["dtype"] is not None - assert props["e"]["column_info"]["unique"] is not None - infos = Model.column_infos - assert infos["b"].constraints is not None - assert infos["c"].derived_from is not None - assert infos["d"].dtype is not None - assert infos["e"].unique is not None + schema = Model.model_schema + def validate_model_schema(schema) -> None: + assert set(schema) == {"properties", "required", "type", "title"} + assert schema["title"] == "Model" + assert schema["type"] == "object" + assert "a" in schema["properties"] + assert schema["properties"]["a"]["type"] == "integer" + assert schema["properties"]["a"]["minimum"] == 0 -def test_nullable_columns(): + validate_model_schema(schema) + + # nested models + class ParentModel(pt.Model): + a: int + b: Model + c: Optional[float] = None + + schema = ParentModel.model_schema + validate_model_schema( + 
schema["$defs"]["Model"] + ) # ensure that nested model schema is recorded in definitions + validate_model_schema( + schema["properties"]["b"] + ) # and all info is copied into field properties + assert set(schema["properties"]) == {"a", "b", "c"} + assert schema["properties"]["a"]["required"] + assert schema["properties"]["b"]["required"] + assert schema["properties"]["a"]["type"] == "integer" + assert not schema["properties"]["c"]["required"] + + +def test_nullable_columns() -> None: class Test1(pt.Model): - foo: str | None = pt.Field(dtype=pl.Utf8) + foo: Optional[str] = pt.Field(dtype=pl.Utf8) assert Test1.nullable_columns == {"foo"} assert set(Test1.valid_dtypes["foo"]) == {pl.Utf8} class Test2(pt.Model): - foo: int | None = pt.Field(dtype=pl.UInt32) + foo: Optional[int] = pt.Field(dtype=pl.UInt32) assert Test2.nullable_columns == {"foo"} assert set(Test2.valid_dtypes["foo"]) == {pl.UInt32} -def test_conflicting_type_dtype(): - with pytest.raises(ValueError, match="Invalid dtype String") as e: +def test_conflicting_type_dtype() -> None: + string_dtype_alias = "Utf8" if pl.__version__ < "0.20.3" else "String" + with pytest.raises(ValueError, match=f"Invalid dtype {string_dtype_alias}") as e: class Test1(pt.Model): foo: int = pt.Field(dtype=pl.Utf8) @@ -461,14 +469,64 @@ class Test2(pt.Model): with pytest.raises(ValueError, match="Invalid dtype UInt32") as e: class Test3(pt.Model): - foo: str | None = pt.Field(dtype=pl.UInt32) + foo: Optional[str] = pt.Field(dtype=pl.UInt32) Test3.validate_schema() -def test_polars_python_type_harmonization(): +def test_polars_python_type_harmonization() -> None: class Test(pt.Model): date: datetime = pt.Field(dtype=pl.Datetime(time_unit="us")) - # TODO add more other lesser-used type combinations here + time: time assert Test.valid_dtypes["date"] == {pl.Datetime(time_unit="us")} + assert Test.valid_dtypes["time"] == TIME_DTYPES + + +def test_column_infos() -> None: + class Model(pt.Model): + a: int + b: int = 
pt.Field(constraints=[(pl.col("b") < 10)]) + c: int = pt.Field(derived_from=pl.col("a") + pl.col("b")) + d: int = pt.Field(dtype=pl.UInt8) + e: int = pt.Field(unique=True) + + schema = Model.model_json_schema() # no serialization issues + props = schema[ + "properties" + ] # extra fields are stored in modified schema_properties + for col in ["b", "c", "d", "e"]: + assert "column_info" in props[col] + assert props["b"]["column_info"]["constraints"] is not None + assert props["c"]["column_info"]["derived_from"] is not None + assert props["d"]["column_info"]["dtype"] is not None + assert props["e"]["column_info"]["unique"] is not None + infos = Model.column_infos + assert infos["b"].constraints is not None + assert infos["c"].derived_from is not None + assert infos["d"].dtype is not None + assert infos["e"].unique is not None + + +def test_missing_date_struct(): + class SubModel(pt.Model): + a: int + b: AwareDatetime + + class Test(pt.Model): + a: int + b: int + c: Optional[SubModel] + + df = Test.examples({"a": range(5), "c": None}) + Test.validate(df.cast()) + + +def test_validation_alias(): + class AliasModel(pt.Model): + my_val_a: int = pt.Field(validation_alias="myValA") + my_val_b: int = pt.Field(validation_alias=AliasChoices("my_val_b", "myValB")) + + # code from validators _find_errors showing that we need model_json_schema without aliases + for column_name, column_properties in AliasModel._schema_properties().items(): + assert AliasModel.column_infos[column_name] is not None diff --git a/tests/test_polars.py b/tests/test_polars.py index 668aab9..1a9a3ad 100644 --- a/tests/test_polars.py +++ b/tests/test_polars.py @@ -1,14 +1,17 @@ """Tests related to polars functionality.""" import re from datetime import date, datetime +from typing import Optional import patito as pt import polars as pl import pytest -from pydantic import ValidationError +from pydantic import AliasChoices, AliasPath, ValidationError +from tests.examples import SmallModel -def 
test_dataframe_get_method(): + +def test_dataframe_get_method() -> None: """You should be able to retrieve a single row and cast to model.""" class Product(pt.Model): @@ -45,7 +48,7 @@ class Product(pt.Model): df.filter(pl.col("product_id") == 1).get() -def test_dataframe_set_model_method(): +def test_dataframe_set_model_method() -> None: """You should be able to set the associated model of a dataframe.""" class MyModel(pt.Model): @@ -56,7 +59,7 @@ class MyModel(pt.Model): assert MyModel.DataFrame.model is MyModel -def test_fill_nan_with_defaults(): +def test_fill_nan_with_defaults() -> None: """You should be able to fill missing values with declared defaults.""" class DefaultModel(pt.Model): @@ -69,7 +72,45 @@ class DefaultModel(pt.Model): assert filled_df.equals(correct_filled_df) -def test_preservation_of_model(): +def test_create_missing_columns_with_defaults() -> None: + """columns that have default values should be created if they are missing.""" + + class NestedModel(pt.Model): + foo: int = 2 + small_model: Optional[SmallModel] = None + + class DefaultModel(pt.Model): + foo: int = 2 + bar: Optional[str] = "default" + small_model: Optional[SmallModel] = None # works ok on polars==0.20.3 + nested_model: Optional[NestedModel] = None # fails to convert on polars==0.20.3 + + missing_df = pt.DataFrame({"foo": [1, 2]}) + filled_df = missing_df.set_model(DefaultModel).fill_null(strategy="defaults") + correct_filled_df = pl.DataFrame( + { + "foo": [1, 2], + "bar": ["default", "default"], + "small_model": [None, None], + "nested_model": [None, None], + }, + schema=DefaultModel.dtypes, + ) + assert filled_df.equals(correct_filled_df) + + +def test_create_missing_columns_with_dtype() -> None: + class DefaultModel(pt.Model): + foo: int + bar: Optional[int] = None + + missing_df = pt.DataFrame({"foo": [1, 2]}) + filled_df = missing_df.set_model(DefaultModel).fill_null(strategy="defaults") + assert "bar" in filled_df.columns + assert filled_df.dtypes[1] == pl.Int64 + + 
+def test_preservation_of_model() -> None: """The model should be preserved on data frames after method invocations.""" class DummyModel(pt.Model): @@ -106,11 +147,12 @@ class AnotherDummyModel(pt.Model): assert type(pt.DataFrame().lazy().collect()) is pt.DataFrame -def test_dataframe_model_dtype_casting(): +def test_dataframe_model_dtype_casting() -> None: """You should be able to cast columns according to model type annotations.""" class DTypeModel(pt.Model): - implicit_int_2: int + implicit_int: int + explicit_uint: int = pt.Field(dtype=pl.UInt64) implicit_date: date implicit_datetime: datetime @@ -118,7 +160,7 @@ class DTypeModel(pt.Model): [ # UInt32 is compatible with the "int" annotation, and since no explicit # dtype is specified, it will not be casted to the default pl.Int64 - pl.lit(1).cast(pl.UInt32).alias("implicit_int_2"), + pl.lit(1).cast(pl.UInt32).alias("implicit_int"), # The integer will be casted to datetime 1970-01-01 00:00:00 pl.lit(0).cast(pl.Int64).alias("implicit_date"), # The integer will be casted to date 1970-01-01 @@ -143,6 +185,16 @@ class DTypeModel(pt.Model): pl.Boolean, ] + some_columns_df = original_df.cast( + strict=True, columns=["implicit_int", "implicit_date"] + ) + assert some_columns_df.dtypes == [ + pl.Int64, + pl.Date, + pl.Int64, # not casted + pl.Boolean, + ] + @pytest.mark.xfail(strict=True) def test_correct_columns_and_dtype_on_read(tmp_path): @@ -188,7 +240,7 @@ class DerivedModel(pt.Model): ) -def test_derive_functionality(): +def test_derive_functionality() -> None: """Test of Field(derived_from=...) 
and DataFrame.derive().""" class DerivedModel(pt.Model): @@ -198,6 +250,13 @@ class DerivedModel(pt.Model): expr_derived: int = pt.Field(derived_from=2 * pl.col("underived")) second_order_derived: int = pt.Field(derived_from=2 * pl.col("expr_derived")) + assert DerivedModel.derived_columns == { + "const_derived", + "column_derived", + "expr_derived", + "second_order_derived", + } + df = DerivedModel.DataFrame({"underived": [1, 2]}) assert df.columns == ["underived"] derived_df = df.derive() @@ -216,10 +275,100 @@ class DerivedModel(pt.Model): with pytest.raises(ValidationError): class InvalidModel(pt.Model): - incompatible: int = pt.Field(derived_from=object) # pyright: ignore + incompatible: int = pt.Field(derived_from=object) + + +def test_recursive_derive() -> None: + """Data.Frame.derive() infers proper derivation order and executes it, then returns columns in the order given by the model.""" + + class DerivedModel(pt.Model): + underived: int + const_derived: int = pt.Field(derived_from=pl.lit(3)) + second_order_derived: int = pt.Field( + derived_from=2 * pl.col("expr_derived") + ) # requires expr_derived to be derived first + column_derived: int = pt.Field(derived_from="underived") + expr_derived: int = pt.Field(derived_from=2 * pl.col("underived")) + + df = DerivedModel.DataFrame({"underived": [1, 2]}) + assert df.columns == ["underived"] + derived_df = df.derive() + + correct_derived_df = DerivedModel.DataFrame( + { + "underived": [1, 2], + "const_derived": [3, 3], + "second_order_derived": [4, 8], + "column_derived": [1, 2], + "expr_derived": [ + 2, + 4, + ], # derived before second_order_derived, but remains in last position in output df according to the model + } + ) + assert derived_df.equals(correct_derived_df) -def test_drop_method(): +def test_derive_subset() -> None: + class DerivedModel(pt.Model): + underived: int + derived: Optional[int] = pt.Field(default=None, derived_from="underived") + expr_derived: int = pt.Field( + derived_from=2 * 
pl.col("derived") + ) # depends on derived + + df = DerivedModel.DataFrame({"underived": [1, 2]}) + correct_derived_df = DerivedModel.DataFrame( + { + "underived": [1, 2], + "expr_derived": [2, 4], + } + ) + assert df.derive( + columns=["expr_derived"] + ).equals( + correct_derived_df + ) # only include "expr_derived" in output, but ensure that "derived" was derived recursively + + +def test_derive_on_defaults() -> None: + class DerivedModel(pt.Model): + underived: int + derived: Optional[int] = pt.Field(default=None, derived_from="underived") + + df = DerivedModel.DataFrame([DerivedModel(underived=1), DerivedModel(underived=2)]) + derived_df = df.derive() + + correct_derived_df = DerivedModel.DataFrame( + { + "underived": [1, 2], + "derived": [1, 2], + } + ) + assert derived_df.equals(correct_derived_df) + + +def test_lazy_derive() -> None: + class DerivedModel(pt.Model): + underived: int + derived: Optional[int] = pt.Field(default=None, derived_from="underived") + + ldf = DerivedModel.DataFrame({"underived": [1, 2]}).lazy() + assert ldf.columns == ["underived"] + derived_ldf = ldf.derive() + assert derived_ldf.columns == ["underived", "derived"] + df = derived_ldf.collect() + + correct_derived_df = DerivedModel.DataFrame( + { + "underived": [1, 2], + "derived": [1, 2], + } + ) + assert df.equals(correct_derived_df) + + +def test_drop_method() -> None: """We should be able to drop columns not specified by the data frame model.""" class Model(pt.Model): @@ -239,3 +388,75 @@ class Model(pt.Model): # Or a list of columns assert df.drop(["column_1", "column_2"]).columns == [] + + +def test_polars_conversion(): + """You should be able to convert a DataFrame to a polars DataFrame.""" + + class Model(pt.Model): + a: int + b: str + + df = Model.DataFrame({"a": [1, 2], "b": ["foo", "bar"]}) + polars_df = df.as_polars() + assert isinstance(polars_df, pl.DataFrame) + assert not isinstance(polars_df, pt.DataFrame) + assert polars_df.shape == (2, 2) + assert polars_df.columns 
== ["a", "b"] + assert polars_df.dtypes == [pl.Int64, pl.Utf8] + + +def test_validation_alias() -> None: + class AliasModel(pt.Model): + my_val_a: int = pt.Field(validation_alias="myValA") + my_val_b: int = pt.Field( + validation_alias=AliasChoices("my_val_b", "myValB", "myValB2") + ) + my_val_c: int + first_name: str = pt.Field(validation_alias=AliasPath("names", 0)) + last_name: str = pt.Field( + validation_alias=AliasChoices("lastName", AliasPath("names", 1)) + ) + + examples = [ + {"myValA": 1, "myValB": 1, "my_val_c": 1, "names": ["fname1", "lname1"]}, + {"myValA": 2, "myValB": 2, "my_val_c": 2, "names": ["fname2", "lname2"]}, + { + "my_val_a": 3, + "myValB2": 3, + "my_val_c": 3, + "names": ["fname3"], + "last_name": "lname3", + }, + { + "my_val_a": 4, + "my_val_b": 4, + "my_val_c": 4, + "first_name": "fname4", + "last_name": "lname4", + }, + ] + + # check record with all aliases + df = ( + AliasModel.LazyFrame([examples[0]]) + .unalias() + .cast(strict=True) + .collect() + .validate() + ) + assert df.columns == AliasModel.columns + + # check record with no aliases + df = ( + AliasModel.LazyFrame([examples[3]]) + .unalias() + .cast(strict=True) + .collect() + .validate() + ) + assert df.columns == AliasModel.columns + + # check records with mixed aliases + df = AliasModel.LazyFrame(examples).unalias().cast(strict=True).collect().validate() + assert df.columns == AliasModel.columns diff --git a/tests/test_validators.py b/tests/test_validators.py index e8cceaa..2a81c33 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -1,4 +1,6 @@ """Tests for the patito.validators module.""" +from __future__ import annotations + import enum import re import sys @@ -12,7 +14,7 @@ from patito.validators import _dewrap_optional, _is_optional, validate -def test_is_optional(): +def test_is_optional() -> None: """It should return True for optional types.""" assert _is_optional(Optional[int]) assert _is_optional(Union[int, None]) @@ -23,12 +25,12 @@ def 
test_is_optional(): sys.version_info <= (3, 10), reason="Using | as a type union operator is only supported from python 3.10.", ) -def test_is_optional_with_pipe_operator(): +def test_is_optional_with_pipe_operator() -> None: """It should return True for optional types.""" assert _is_optional(int | None) # typing: ignore # pragma: noqa # pyright: ignore -def test_dewrap_optional(): +def test_dewrap_optional() -> None: """It should return the inner type of Optional types.""" assert _dewrap_optional(Optional[int]) is int assert _dewrap_optional(Union[int, None]) is int @@ -39,14 +41,14 @@ def test_dewrap_optional(): sys.version_info <= (3, 10), reason="Using | as a type union operator is only supported from python 3.10.", ) -def test_dewrap_optional_with_pipe_operator(): +def test_dewrap_optional_with_pipe_operator() -> None: """It should return the inner type of Optional types.""" assert ( # typing: ignore # pragma: noqa # pyright: ignore _dewrap_optional(int | None) is int ) -def test_missing_column_validation(): +def test_missing_column_validation() -> None: """Validation should catch missing columns.""" class SingleColumnModel(pt.Model): @@ -72,22 +74,35 @@ class SingleColumnModel(pt.Model): }, ] + df_missing_column_2 = pl.DataFrame({"column_1": [1, 2, 3]}) + with pytest.raises(DataFrameValidationError) as e_info: + validate(dataframe=df_missing_column_2, schema=SingleColumnModel) + validate( + dataframe=df_missing_column_2, + schema=SingleColumnModel, + allow_missing_columns=True, + ) # does not raise when allow_missing_columns=True + SingleColumnModel.validate( + df_missing_column_2, allow_missing_columns=True + ) # kwargs are passed via model-centric validation API + -def test_superfluous_column_validation(): - """Validation should catch superflous columns.""" +def test_superfluous_column_validation() -> None: + """Validation should catch superfluous columns.""" class SingleColumnModel(pt.Model): column_1: int # We raise an error because we have added column_2 
+ test_df = pl.DataFrame().with_columns( + [ + pl.lit(1).alias("column_1"), + pl.lit(2).alias("column_2"), + ] + ) with pytest.raises(DataFrameValidationError) as e_info: validate( - dataframe=pl.DataFrame().with_columns( - [ - pl.lit(1).alias("column_1"), - pl.lit(2).alias("column_2"), - ] - ), + dataframe=test_df, schema=SingleColumnModel, ) @@ -99,8 +114,15 @@ class SingleColumnModel(pt.Model): "type": "type_error.superfluouscolumns", } + validate( + test_df, SingleColumnModel, allow_superfluous_columns=True + ) # does not raise + SingleColumnModel.validate( + test_df, allow_superfluous_columns=True + ) # model-centric API also works + -def test_validate_non_nullable_columns(): +def test_validate_non_nullable_columns() -> None: """Test for validation logic related to missing values.""" class SmallModel(pt.Model): @@ -129,7 +151,7 @@ class SmallModel(pt.Model): } -def test_validate_dtype_checks(): +def test_validate_dtype_checks() -> None: """Test dtype-checking logic.""" # An integer field may be validated against several different integer dtypes @@ -141,7 +163,7 @@ class IntModel(pt.Model): dataframe = pl.DataFrame([series]) validate(dataframe=dataframe, schema=IntModel) - # But other types, including floating point types, must be considered invalid + # But other types must be considered invalid for dtype in (pl.Utf8, pl.Date): series = pl.Series([], dtype=dtype).alias("column") dataframe = pl.DataFrame([series]) @@ -203,7 +225,7 @@ class CompleteModel(pt.Model): # Anything non-compatible with polars should raise NotImplementedError with pytest.raises(ValueError, match="not compatible with any polars dtypes"): - class NonCompatibleModel(pt.Model): # TODO catch value error + class NonCompatibleModel(pt.Model): my_field: object NonCompatibleModel.validate_schema() @@ -227,7 +249,7 @@ class PandasCompatibleModel(CompleteModel): ) -def test_uniqueness_validation(): +def test_uniqueness_validation() -> None: """It should be able to validate uniqueness.""" class 
MyModel(pt.Model): @@ -244,7 +266,7 @@ class MyModel(pt.Model): MyModel.validate(duplicated_df) -def test_datetime_validation(): +def test_datetime_validation() -> None: """ Test for date(time) validation. @@ -289,7 +311,7 @@ class DateTimeModel(pt.Model): validate(dataframe=date_df, schema=DateTimeModel) -def test_enum_validation(): +def test_enum_validation() -> None: """Test validation of enum.Enum-typed fields.""" class ABCEnum(enum.Enum): @@ -316,7 +338,7 @@ class EnumModel(pt.Model): } -def test_literal_enum_validation(): +def test_literal_enum_validation() -> None: """Test validation of typing.Literal-typed fields.""" class EnumModel(pt.Model): @@ -338,7 +360,7 @@ class EnumModel(pt.Model): } -def test_uniqueness_constraint_validation(): +def test_uniqueness_constraint_validation() -> None: """Uniqueness constraints should be validated.""" class UniqueModel(pt.Model): @@ -358,7 +380,7 @@ class UniqueModel(pt.Model): } -def test_validation_of_bounds_checks(): +def test_validation_of_bounds_checks() -> None: """Check if value bounds are correctly validated.""" class BoundModel(pt.Model): @@ -404,7 +426,7 @@ class BoundModel(pt.Model): } -def test_validation_of_dtype_specifiers(): +def test_validation_of_dtype_specifiers() -> None: """Fields with specific dtype annotations should be validated.""" class DTypeModel(pt.Model): @@ -465,7 +487,7 @@ class DTypeModel(pt.Model): } -def test_custom_constraint_validation(): +def test_custom_constraint_validation() -> None: """Users should be able to specify custom constraints.""" class CustomConstraintModel(pt.Model): @@ -503,7 +525,7 @@ class PizzaSlice(pt.Model): PizzaSlice.validate(part_pizza) -def test_anonymous_column_constraints(): +def test_anonymous_column_constraints() -> None: """You should be able to refer to the field column with an anonymous column.""" class Pair(pt.Model): @@ -525,7 +547,7 @@ class Pair(pt.Model): ) -def test_optional_enum(): +def test_optional_enum() -> None: """It should handle optional 
enums correctly.""" class OptionalEnumModel(pt.Model): @@ -540,7 +562,7 @@ class OptionalEnumModel(pt.Model): sys.version_info <= (3, 10), reason="Using | as a type union operator is only supported from python 3.10.", ) -def test_optional_pipe_operator(): +def test_optional_pipe_operator() -> None: class OptionalEnumModel(pt.Model): # Old type annotation syntax optional_enum_1: Optional[Literal["A", "B"]] @@ -562,7 +584,7 @@ class OptionalEnumModel(pt.Model): raises=TypeError, strict=True, ) -def test_validation_of_list_dtypes(): +def test_validation_of_list_dtypes() -> None: """It should be able to validate dtypes organized in lists.""" class ListModel(pt.Model): @@ -598,11 +620,11 @@ class ListModel(pt.Model): ListModel.validate(valid_df.with_columns(pl.col(old).alias(new))) -def test_nested_field_attrs(): +def test_nested_field_attrs() -> None: """ensure that constraints are respected even when embedded inside 'anyOf'""" class Test(pt.Model): - foo: int | None = pt.Field( + foo: Optional[int] = pt.Field( dtype=pl.Int64, ge=0, le=100, constraints=pt.field.sum() == 100 ) @@ -616,3 +638,26 @@ class Test(pt.Model): null_test_df = Test.DataFrame({"foo": [100, None, None]}) Test.validate(null_test_df) # should not raise + + +def test_validation_column_subset() -> None: + """ensure that columns are only validated if they are in the subset""" + + class Test(pt.Model): + a: int + b: int = pt.Field(dtype=pl.Int64, ge=0, le=100) + + Test.validate(pl.DataFrame({"a": [1, 2, 3], "b": [1, 2, 3]})) # should pass + with pytest.raises(DataFrameValidationError): + Test.validate(pl.DataFrame({"a": [1, 2, 3], "b": [101, 102, 103]})) + + # should pass without validating b + Test.validate(pl.DataFrame({"a": [1, 2, 3], "b": [101, 102, 103]}), columns=["a"]) + + with pytest.raises(DataFrameValidationError): + Test.validate( + pl.DataFrame({"a": [1, 2, 3], "b": [101, 102, 103]}), columns=["b"] + ) + # test asking for superfluous column + with pytest.raises(DataFrameValidationError): + 
Test.validate(pl.DataFrame({"a": [1, 2, 3], "b": [1, 2, 3]}), columns=["c"]) From 48ed78246d782a8fb73849c13682baf15940f855 Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Fri, 9 Feb 2024 16:18:40 -0500 Subject: [PATCH 25/29] bug: instantiated dtypes, constraints on examples, tz-less datetimes --- src/patito/_pydantic/dtypes/dtypes.py | 4 +++- src/patito/pydantic.py | 18 +++++++++++++----- tests/examples.py | 1 + tests/test_dummy_data.py | 6 +++++- tests/test_model.py | 2 ++ 5 files changed, 24 insertions(+), 7 deletions(-) diff --git a/src/patito/_pydantic/dtypes/dtypes.py b/src/patito/_pydantic/dtypes/dtypes.py index 3f343f1..80a2d66 100644 --- a/src/patito/_pydantic/dtypes/dtypes.py +++ b/src/patito/_pydantic/dtypes/dtypes.py @@ -52,7 +52,9 @@ def default_dtypes_for_model( else: default_dtypes[column] = default_dtype else: - default_dtypes[column] = dtype + default_dtypes[column] = ( + dtype if isinstance(dtype, DataType) else dtype() + ) # if dtype is not instantiated, instantiate it return default_dtypes diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index b136833..68cd844 100644 --- a/src/patito/pydantic.py +++ b/src/patito/pydantic.py @@ -572,8 +572,14 @@ def example_value( # noqa: C901 elif field_type in {"integer", "number"}: # For integer and float types we must check if there are imposed bounds - lower = properties.get("minimum") or properties.get("exclusiveMinimum") - upper = properties.get("maximum") or properties.get("exclusiveMaximum") + + minimum = properties.get("minimum") + exclusive_minimum = properties.get("exclusiveMinimum") + maximum = properties.get("maximum") + exclusive_maximum = properties.get("exclusiveMaximum") + + lower = minimum if minimum is not None else exclusive_minimum + upper = maximum if maximum is not None else exclusive_maximum # If the dtype is an unsigned integer type, we must return a positive value if info.dtype: @@ -615,9 +621,11 @@ def example_value( # noqa: C901 if "column_info" in properties: dtype_str 
= properties["column_info"]["dtype"] dtype = dtype_from_string(dtype_str) - return datetime( - year=1970, month=1, day=1, tzinfo=ZoneInfo(dtype.time_zone) - ) + if getattr(dtype, "time_zone", None) is not None: + tzinfo = ZoneInfo(dtype.time_zone) + else: + tzinfo = None + return datetime(year=1970, month=1, day=1, tzinfo=tzinfo) return datetime(year=1970, month=1, day=1) elif "format" in properties and properties["format"] == "time": return time(12, 30) diff --git a/tests/examples.py b/tests/examples.py index 2fd6790..dd3c549 100644 --- a/tests/examples.py +++ b/tests/examples.py @@ -40,6 +40,7 @@ class CompleteModel(pt.Model): date_column: date datetime_column: datetime + datetime_column2: datetime = pt.Field(dtype=pl.Datetime) aware_datetime_column: AwareDatetime = pt.Field(dtype=pl.Datetime(time_zone="UTC")) duration_column: timedelta time_column: time diff --git a/tests/test_dummy_data.py b/tests/test_dummy_data.py index e2f9edc..8a3e282 100644 --- a/tests/test_dummy_data.py +++ b/tests/test_dummy_data.py @@ -52,6 +52,7 @@ class MyModel(pt.Model): c: Optional[int] d: Optional[List[str]] = pt.Field(dtype=pl.List(pl.Utf8)) e: List[int] + f: int = pt.Field(ge=0) df = MyModel.examples({"a": [1, 2]}) assert isinstance(df, pl.DataFrame) @@ -61,8 +62,11 @@ class MyModel(pt.Model): pl.Int64, pl.List(pl.Utf8), pl.List(pl.Int64), + pl.Int64, ] - assert df.columns == ["a", "b", "c", "d", "e"] + assert df.columns == ["a", "b", "c", "d", "e", "f"] + assert (df["f"] >= 0).all() + MyModel.validate(df) # A TypeError should be raised when you provide no column names with pytest.raises( diff --git a/tests/test_model.py b/tests/test_model.py index 792c2de..2cb685b 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -187,6 +187,7 @@ def test_mapping_to_polars_dtypes() -> None: "bool_column": pl.Boolean(), "date_column": pl.Date(), "datetime_column": pl.Datetime(), + "datetime_column2": pl.Datetime(), "aware_datetime_column": pl.Datetime(time_zone="UTC"), 
"duration_column": pl.Duration(), "time_column": pl.Time(), @@ -213,6 +214,7 @@ def test_mapping_to_polars_dtypes() -> None: "bool_column": {pl.Boolean}, "date_column": DATE_DTYPES, "datetime_column": DATETIME_DTYPES, + "datetime_column2": {pl.Datetime()}, "aware_datetime_column": {pl.Datetime(time_zone="UTC")}, "duration_column": DURATION_DTYPES, "time_column": TIME_DTYPES, From a2459a97bb608da9d251869fedee4e7a68cd0d3d Mon Sep 17 00:00:00 2001 From: Brendan Cooley Date: Wed, 14 Feb 2024 11:37:11 -0500 Subject: [PATCH 26/29] chore: docs cleanup, doctests running, partial linting --- .readthedocs.yaml | 1 - docs/api/patito/Model/index.rst | 1 - .../patito/duckdb/Database/__contains__.rst | 6 - .../duckdb/Database/create_enum_types.rst | 6 - .../patito/duckdb/Database/create_table.rst | 6 - .../patito/duckdb/Database/create_view.rst | 6 - docs/api/patito/duckdb/Database/default.rst | 6 - .../patito/duckdb/Database/empty_relation.rst | 6 - docs/api/patito/duckdb/Database/execute.rst | 8 - .../duckdb/Database/from_connection.rst | 6 - docs/api/patito/duckdb/Database/index.rst | 24 -- docs/api/patito/duckdb/Database/query.rst | 8 - docs/api/patito/duckdb/Database/table.rst | 8 - .../patito/duckdb/Database/to_relation.rst | 6 - docs/api/patito/duckdb/Relation/__add__.rst | 6 - .../patito/duckdb/Relation/__getitem__.rst | 6 - docs/api/patito/duckdb/Relation/__iter__.rst | 8 - docs/api/patito/duckdb/Relation/__len__.rst | 8 - docs/api/patito/duckdb/Relation/__str__.rst | 6 - .../api/patito/duckdb/Relation/add_prefix.rst | 6 - .../api/patito/duckdb/Relation/add_suffix.rst | 6 - docs/api/patito/duckdb/Relation/aggregate.rst | 6 - docs/api/patito/duckdb/Relation/alias.rst | 7 - docs/api/patito/duckdb/Relation/all.rst | 6 - docs/api/patito/duckdb/Relation/case.rst | 6 - docs/api/patito/duckdb/Relation/cast.rst | 8 - docs/api/patito/duckdb/Relation/coalesce.rst | 6 - docs/api/patito/duckdb/Relation/columns.rst | 6 - docs/api/patito/duckdb/Relation/count.rst | 8 - 
.../patito/duckdb/Relation/create_table.rst | 8 - .../patito/duckdb/Relation/create_view.rst | 8 - docs/api/patito/duckdb/Relation/distinct.rst | 8 - docs/api/patito/duckdb/Relation/drop.rst | 6 - docs/api/patito/duckdb/Relation/except_.rst | 8 - docs/api/patito/duckdb/Relation/execute.rst | 6 - docs/api/patito/duckdb/Relation/filter.rst | 8 - docs/api/patito/duckdb/Relation/get.rst | 8 - docs/api/patito/duckdb/Relation/index.rst | 57 ----- .../api/patito/duckdb/Relation/inner_join.rst | 8 - .../patito/duckdb/Relation/insert_into.rst | 8 - docs/api/patito/duckdb/Relation/intersect.rst | 8 - docs/api/patito/duckdb/Relation/join.rst | 8 - docs/api/patito/duckdb/Relation/left_join.rst | 8 - docs/api/patito/duckdb/Relation/limit.rst | 8 - docs/api/patito/duckdb/Relation/model.rst | 9 - docs/api/patito/duckdb/Relation/order.rst | 8 - docs/api/patito/duckdb/Relation/rename.rst | 6 - docs/api/patito/duckdb/Relation/select.rst | 8 - docs/api/patito/duckdb/Relation/set_alias.rst | 8 - docs/api/patito/duckdb/Relation/set_model.rst | 8 - docs/api/patito/duckdb/Relation/to_df.rst | 6 - docs/api/patito/duckdb/Relation/to_pandas.rst | 6 - docs/api/patito/duckdb/Relation/to_series.rst | 6 - docs/api/patito/duckdb/Relation/types.rst | 8 - docs/api/patito/duckdb/Relation/union.rst | 8 - .../patito/duckdb/Relation/with_columns.rst | 6 - .../with_missing_defaultable_columns.rst | 6 - .../with_missing_nullable_columns.rst | 6 - docs/api/patito/duckdb/index.rst | 9 - docs/index.rst | 14 +- noxfile.py | 31 ++- poetry.lock | 165 ++++++------ pyproject.toml | 12 +- src/patito/_pydantic/column_info.py | 6 +- src/patito/_pydantic/dtypes/__init__.py | 2 + src/patito/_pydantic/dtypes/dtypes.py | 12 +- src/patito/_pydantic/dtypes/utils.py | 31 ++- src/patito/_pydantic/repr.py | 14 +- src/patito/_pydantic/schema.py | 10 +- src/patito/polars.py | 126 +++++---- src/patito/pydantic.py | 241 ++++++++++++------ src/patito/validators.py | 50 ++-- src/patito/xdg.py | 6 +- tests/test_model.py | 2 - 
tests/test_polars.py | 2 +- tests/test_validators.py | 19 +- 76 files changed, 432 insertions(+), 781 deletions(-) delete mode 100644 docs/api/patito/duckdb/Database/__contains__.rst delete mode 100644 docs/api/patito/duckdb/Database/create_enum_types.rst delete mode 100644 docs/api/patito/duckdb/Database/create_table.rst delete mode 100644 docs/api/patito/duckdb/Database/create_view.rst delete mode 100644 docs/api/patito/duckdb/Database/default.rst delete mode 100644 docs/api/patito/duckdb/Database/empty_relation.rst delete mode 100644 docs/api/patito/duckdb/Database/execute.rst delete mode 100644 docs/api/patito/duckdb/Database/from_connection.rst delete mode 100644 docs/api/patito/duckdb/Database/index.rst delete mode 100644 docs/api/patito/duckdb/Database/query.rst delete mode 100644 docs/api/patito/duckdb/Database/table.rst delete mode 100644 docs/api/patito/duckdb/Database/to_relation.rst delete mode 100644 docs/api/patito/duckdb/Relation/__add__.rst delete mode 100644 docs/api/patito/duckdb/Relation/__getitem__.rst delete mode 100644 docs/api/patito/duckdb/Relation/__iter__.rst delete mode 100644 docs/api/patito/duckdb/Relation/__len__.rst delete mode 100644 docs/api/patito/duckdb/Relation/__str__.rst delete mode 100644 docs/api/patito/duckdb/Relation/add_prefix.rst delete mode 100644 docs/api/patito/duckdb/Relation/add_suffix.rst delete mode 100644 docs/api/patito/duckdb/Relation/aggregate.rst delete mode 100644 docs/api/patito/duckdb/Relation/alias.rst delete mode 100644 docs/api/patito/duckdb/Relation/all.rst delete mode 100644 docs/api/patito/duckdb/Relation/case.rst delete mode 100644 docs/api/patito/duckdb/Relation/cast.rst delete mode 100644 docs/api/patito/duckdb/Relation/coalesce.rst delete mode 100644 docs/api/patito/duckdb/Relation/columns.rst delete mode 100644 docs/api/patito/duckdb/Relation/count.rst delete mode 100644 docs/api/patito/duckdb/Relation/create_table.rst delete mode 100644 docs/api/patito/duckdb/Relation/create_view.rst delete mode 
100644 docs/api/patito/duckdb/Relation/distinct.rst delete mode 100644 docs/api/patito/duckdb/Relation/drop.rst delete mode 100644 docs/api/patito/duckdb/Relation/except_.rst delete mode 100644 docs/api/patito/duckdb/Relation/execute.rst delete mode 100644 docs/api/patito/duckdb/Relation/filter.rst delete mode 100644 docs/api/patito/duckdb/Relation/get.rst delete mode 100644 docs/api/patito/duckdb/Relation/index.rst delete mode 100644 docs/api/patito/duckdb/Relation/inner_join.rst delete mode 100644 docs/api/patito/duckdb/Relation/insert_into.rst delete mode 100644 docs/api/patito/duckdb/Relation/intersect.rst delete mode 100644 docs/api/patito/duckdb/Relation/join.rst delete mode 100644 docs/api/patito/duckdb/Relation/left_join.rst delete mode 100644 docs/api/patito/duckdb/Relation/limit.rst delete mode 100644 docs/api/patito/duckdb/Relation/model.rst delete mode 100644 docs/api/patito/duckdb/Relation/order.rst delete mode 100644 docs/api/patito/duckdb/Relation/rename.rst delete mode 100644 docs/api/patito/duckdb/Relation/select.rst delete mode 100644 docs/api/patito/duckdb/Relation/set_alias.rst delete mode 100644 docs/api/patito/duckdb/Relation/set_model.rst delete mode 100644 docs/api/patito/duckdb/Relation/to_df.rst delete mode 100644 docs/api/patito/duckdb/Relation/to_pandas.rst delete mode 100644 docs/api/patito/duckdb/Relation/to_series.rst delete mode 100644 docs/api/patito/duckdb/Relation/types.rst delete mode 100644 docs/api/patito/duckdb/Relation/union.rst delete mode 100644 docs/api/patito/duckdb/Relation/with_columns.rst delete mode 100644 docs/api/patito/duckdb/Relation/with_missing_defaultable_columns.rst delete mode 100644 docs/api/patito/duckdb/Relation/with_missing_nullable_columns.rst delete mode 100644 docs/api/patito/duckdb/index.rst diff --git a/.readthedocs.yaml b/.readthedocs.yaml index b863077..12108f6 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -5,7 +5,6 @@ python: - method: pip path: . 
extra_requirements: - - duckdb - pandas - docs diff --git a/docs/api/patito/Model/index.rst b/docs/api/patito/Model/index.rst index 3ecd10c..c990ac7 100644 --- a/docs/api/patito/Model/index.rst +++ b/docs/api/patito/Model/index.rst @@ -32,6 +32,5 @@ patito.Model suffix unique_columns valid_dtypes - valid_sql_types validate with_fields diff --git a/docs/api/patito/duckdb/Database/__contains__.rst b/docs/api/patito/duckdb/Database/__contains__.rst deleted file mode 100644 index 97d43f8..0000000 --- a/docs/api/patito/duckdb/Database/__contains__.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Database.__contains__ -=================================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.__contains__ diff --git a/docs/api/patito/duckdb/Database/create_enum_types.rst b/docs/api/patito/duckdb/Database/create_enum_types.rst deleted file mode 100644 index cc917b8..0000000 --- a/docs/api/patito/duckdb/Database/create_enum_types.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Database.create_enum_types -======================================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.create_enum_types diff --git a/docs/api/patito/duckdb/Database/create_table.rst b/docs/api/patito/duckdb/Database/create_table.rst deleted file mode 100644 index e73d253..0000000 --- a/docs/api/patito/duckdb/Database/create_table.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Database.create_table -=================================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.create_table diff --git a/docs/api/patito/duckdb/Database/create_view.rst b/docs/api/patito/duckdb/Database/create_view.rst deleted file mode 100644 index 0a34382..0000000 --- a/docs/api/patito/duckdb/Database/create_view.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Database.create_view -================================== - -.. currentmodule:: patito.duckdb - -.. 
automethod:: Database.create_view diff --git a/docs/api/patito/duckdb/Database/default.rst b/docs/api/patito/duckdb/Database/default.rst deleted file mode 100644 index caf90a0..0000000 --- a/docs/api/patito/duckdb/Database/default.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Database.default -============================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.default diff --git a/docs/api/patito/duckdb/Database/empty_relation.rst b/docs/api/patito/duckdb/Database/empty_relation.rst deleted file mode 100644 index bc8b5b6..0000000 --- a/docs/api/patito/duckdb/Database/empty_relation.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Database.empty_relation -===================================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.empty_relation diff --git a/docs/api/patito/duckdb/Database/execute.rst b/docs/api/patito/duckdb/Database/execute.rst deleted file mode 100644 index 10ff1d6..0000000 --- a/docs/api/patito/duckdb/Database/execute.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _Database.execute: - -patito.duckdb.Database.execute -============================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.execute diff --git a/docs/api/patito/duckdb/Database/from_connection.rst b/docs/api/patito/duckdb/Database/from_connection.rst deleted file mode 100644 index f7f7a20..0000000 --- a/docs/api/patito/duckdb/Database/from_connection.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Database.from_connection -====================================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.from_connection diff --git a/docs/api/patito/duckdb/Database/index.rst b/docs/api/patito/duckdb/Database/index.rst deleted file mode 100644 index cb6cee2..0000000 --- a/docs/api/patito/duckdb/Database/index.rst +++ /dev/null @@ -1,24 +0,0 @@ -.. _duckdb.Database: - -patito.duckdb.Database -====================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.__init__ - -.. 
toctree:: - :caption: Methods - :maxdepth: 1 - - create_enum_types - create_table - create_view - default - empty_relation - execute - from_connection - query - table - to_relation - __contains__ <__contains__> diff --git a/docs/api/patito/duckdb/Database/query.rst b/docs/api/patito/duckdb/Database/query.rst deleted file mode 100644 index 36315cd..0000000 --- a/docs/api/patito/duckdb/Database/query.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _duckdb.Database.query: - -patito.duckdb.Database.query -============================ - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.query diff --git a/docs/api/patito/duckdb/Database/table.rst b/docs/api/patito/duckdb/Database/table.rst deleted file mode 100644 index d2456d3..0000000 --- a/docs/api/patito/duckdb/Database/table.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _Database.table: - -patito.duckdb.Database.table -============================ - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.table diff --git a/docs/api/patito/duckdb/Database/to_relation.rst b/docs/api/patito/duckdb/Database/to_relation.rst deleted file mode 100644 index 44f4bb3..0000000 --- a/docs/api/patito/duckdb/Database/to_relation.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Database.to_relation -================================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Database.to_relation diff --git a/docs/api/patito/duckdb/Relation/__add__.rst b/docs/api/patito/duckdb/Relation/__add__.rst deleted file mode 100644 index 49920b6..0000000 --- a/docs/api/patito/duckdb/Relation/__add__.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.__add__ -============================== - -.. currentmodule:: patito.duckdb - -.. 
automethod:: Relation.__add__ diff --git a/docs/api/patito/duckdb/Relation/__getitem__.rst b/docs/api/patito/duckdb/Relation/__getitem__.rst deleted file mode 100644 index 0dc4e7f..0000000 --- a/docs/api/patito/duckdb/Relation/__getitem__.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.__getitem__ -================================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.__getitem__ diff --git a/docs/api/patito/duckdb/Relation/__iter__.rst b/docs/api/patito/duckdb/Relation/__iter__.rst deleted file mode 100644 index 6ed4f7d..0000000 --- a/docs/api/patito/duckdb/Relation/__iter__.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _duckdb.Relation.__iter__: - -patito.duckdb.Relation.__iter__ -=============================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.__iter__ diff --git a/docs/api/patito/duckdb/Relation/__len__.rst b/docs/api/patito/duckdb/Relation/__len__.rst deleted file mode 100644 index 4332cff..0000000 --- a/docs/api/patito/duckdb/Relation/__len__.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _duckdb.Relation.__len__: - -patito.duckdb.Relation.__len__ -============================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.__len__ diff --git a/docs/api/patito/duckdb/Relation/__str__.rst b/docs/api/patito/duckdb/Relation/__str__.rst deleted file mode 100644 index 3bd0c99..0000000 --- a/docs/api/patito/duckdb/Relation/__str__.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.__str__ -============================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.__str__ diff --git a/docs/api/patito/duckdb/Relation/add_prefix.rst b/docs/api/patito/duckdb/Relation/add_prefix.rst deleted file mode 100644 index a2e8ea2..0000000 --- a/docs/api/patito/duckdb/Relation/add_prefix.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.add_prefix -================================= - -.. currentmodule:: patito.duckdb - -.. 
automethod:: Relation.add_prefix diff --git a/docs/api/patito/duckdb/Relation/add_suffix.rst b/docs/api/patito/duckdb/Relation/add_suffix.rst deleted file mode 100644 index 146c442..0000000 --- a/docs/api/patito/duckdb/Relation/add_suffix.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.add_suffix -================================= - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.add_suffix diff --git a/docs/api/patito/duckdb/Relation/aggregate.rst b/docs/api/patito/duckdb/Relation/aggregate.rst deleted file mode 100644 index c7273ea..0000000 --- a/docs/api/patito/duckdb/Relation/aggregate.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.aggregate -================================ - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.aggregate diff --git a/docs/api/patito/duckdb/Relation/alias.rst b/docs/api/patito/duckdb/Relation/alias.rst deleted file mode 100644 index cfdff91..0000000 --- a/docs/api/patito/duckdb/Relation/alias.rst +++ /dev/null @@ -1,7 +0,0 @@ -patito.duckdb.Relation.alias -============================ - -.. currentmodule:: patito.duckdb - -The alias that can be used to refer to the given relation in queries. -Can be set with :ref:`Relation.set_alias()`. diff --git a/docs/api/patito/duckdb/Relation/all.rst b/docs/api/patito/duckdb/Relation/all.rst deleted file mode 100644 index cd153e4..0000000 --- a/docs/api/patito/duckdb/Relation/all.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.all -========================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.all diff --git a/docs/api/patito/duckdb/Relation/case.rst b/docs/api/patito/duckdb/Relation/case.rst deleted file mode 100644 index 80fe5bf..0000000 --- a/docs/api/patito/duckdb/Relation/case.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.case -=========================== - -.. currentmodule:: patito.duckdb - -.. 
automethod:: Relation.case diff --git a/docs/api/patito/duckdb/Relation/cast.rst b/docs/api/patito/duckdb/Relation/cast.rst deleted file mode 100644 index 676ee0d..0000000 --- a/docs/api/patito/duckdb/Relation/cast.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _Relation.cast: - -patito.duckdb.Relation.cast -=========================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.cast diff --git a/docs/api/patito/duckdb/Relation/coalesce.rst b/docs/api/patito/duckdb/Relation/coalesce.rst deleted file mode 100644 index 125002c..0000000 --- a/docs/api/patito/duckdb/Relation/coalesce.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.coalesce -=============================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.coalesce diff --git a/docs/api/patito/duckdb/Relation/columns.rst b/docs/api/patito/duckdb/Relation/columns.rst deleted file mode 100644 index 007b893..0000000 --- a/docs/api/patito/duckdb/Relation/columns.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.columns -============================== - -.. currentmodule:: patito.duckdb - -.. autoproperty:: Relation.columns diff --git a/docs/api/patito/duckdb/Relation/count.rst b/docs/api/patito/duckdb/Relation/count.rst deleted file mode 100644 index cdcff56..0000000 --- a/docs/api/patito/duckdb/Relation/count.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _duckdb.Relation.count: - -patito.duckdb.Relation.count -============================ - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.count diff --git a/docs/api/patito/duckdb/Relation/create_table.rst b/docs/api/patito/duckdb/Relation/create_table.rst deleted file mode 100644 index cc33905..0000000 --- a/docs/api/patito/duckdb/Relation/create_table.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _duckdb.Relation.create_table: - -patito.duckdb.Relation.create_table -=================================== - -.. currentmodule:: patito.duckdb - -.. 
automethod:: Relation.create_table diff --git a/docs/api/patito/duckdb/Relation/create_view.rst b/docs/api/patito/duckdb/Relation/create_view.rst deleted file mode 100644 index 0f0156a..0000000 --- a/docs/api/patito/duckdb/Relation/create_view.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _Relation.create_view: - -patito.duckdb.Relation.create_view -================================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.create_view diff --git a/docs/api/patito/duckdb/Relation/distinct.rst b/docs/api/patito/duckdb/Relation/distinct.rst deleted file mode 100644 index 89c4cc1..0000000 --- a/docs/api/patito/duckdb/Relation/distinct.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _Relation.distinct: - -patito.duckdb.Relation.distinct -=============================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.distinct diff --git a/docs/api/patito/duckdb/Relation/drop.rst b/docs/api/patito/duckdb/Relation/drop.rst deleted file mode 100644 index 5616ec4..0000000 --- a/docs/api/patito/duckdb/Relation/drop.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.drop -=========================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.drop diff --git a/docs/api/patito/duckdb/Relation/except_.rst b/docs/api/patito/duckdb/Relation/except_.rst deleted file mode 100644 index 723eaa5..0000000 --- a/docs/api/patito/duckdb/Relation/except_.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _Relation.except_: - -patito.duckdb.Relation.except\_ -=============================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.except_ diff --git a/docs/api/patito/duckdb/Relation/execute.rst b/docs/api/patito/duckdb/Relation/execute.rst deleted file mode 100644 index 25bf1bf..0000000 --- a/docs/api/patito/duckdb/Relation/execute.rst +++ /dev/null @@ -1,6 +0,0 @@ -patito.duckdb.Relation.execute -============================== - -.. currentmodule:: patito.duckdb - -.. 
automethod:: Relation.execute diff --git a/docs/api/patito/duckdb/Relation/filter.rst b/docs/api/patito/duckdb/Relation/filter.rst deleted file mode 100644 index 844cca1..0000000 --- a/docs/api/patito/duckdb/Relation/filter.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _duckdb.Relation.filter: - -patito.duckdb.Relation.filter -============================= - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.filter diff --git a/docs/api/patito/duckdb/Relation/get.rst b/docs/api/patito/duckdb/Relation/get.rst deleted file mode 100644 index 1914a51..0000000 --- a/docs/api/patito/duckdb/Relation/get.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _duckdb.Relation.get: - -patito.duckdb.Relation.get -========================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.get diff --git a/docs/api/patito/duckdb/Relation/index.rst b/docs/api/patito/duckdb/Relation/index.rst deleted file mode 100644 index ff65cbb..0000000 --- a/docs/api/patito/duckdb/Relation/index.rst +++ /dev/null @@ -1,57 +0,0 @@ -.. _duckdb.Relation.__init__: - -patito.duckdb.Relation -====================== - -.. currentmodule:: patito.duckdb - -.. automethod:: Relation.__init__ - -.. toctree:: - :caption: Methods & Properties - :maxdepth: 1 - - add_prefix - add_suffix - aggregate - alias - all - case - cast - coalesce - columns - count - create_table - create_view - distinct - drop - except_ - execute - filter - get - inner_join - insert_into - intersect - join - left_join - limit - model - order - rename - select