diff --git a/.gitignore b/.gitignore index 15201ac..63a3ab8 100644 --- a/.gitignore +++ b/.gitignore @@ -169,3 +169,10 @@ cython_debug/ # PyPI configuration file .pypirc + +.vscode + + +# Ignore config files with credentials +config/*.json +!config/*.json.template diff --git a/.pylintrc b/.pylintrc index 8db6638..e545389 100644 --- a/.pylintrc +++ b/.pylintrc @@ -33,7 +33,7 @@ ignore-patterns=^\.# # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). -#init-hook= +init-hook='import sys; sys.path.append("src")' # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use. @@ -90,7 +90,10 @@ disable=raw-checker-failed, suppressed-message, useless-suppression, deprecated-pragma, - use-symbolic-message-instead + use-symbolic-message-instead, + missing-function-docstring, + missing-module-docstring, + missing-class-docstring # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option @@ -209,7 +212,7 @@ logging-modules=logging [BASIC] # Naming style matching correct argument names. -argument-naming-style=camelCase +#argument-naming-style=camelCase # Regular expression matching correct argument names. Overrides argument- # naming-style. If left empty, argument names will be checked with the set @@ -217,7 +220,7 @@ argument-naming-style=camelCase #argument-rgx= # Naming style matching correct attribute names. -attr-naming-style=camelCase +#attr-naming-style=camelCase # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming @@ -272,7 +275,7 @@ const-naming-style=UPPER_CASE docstring-min-length=-1 # Naming style matching correct function names. -function-naming-style=camelCase +#function-naming-style=camelCase # Regular expression matching correct function names. Overrides function- # naming-style. If left empty, function names will be checked with the set @@ -303,14 +306,14 @@ inlinevar-naming-style=any #inlinevar-rgx= # Naming style matching correct method names. -method-naming-style=camelCase +#method-naming-style=camelCase # Regular expression matching correct method names. Overrides method-naming- # style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. -module-naming-style=camelCase +#module-naming-style=camelCase # Regular expression matching correct module names. Overrides module-naming- # style. If left empty, module names will be checked with the set naming style. @@ -334,7 +337,7 @@ property-classes=abc.abstractproperty #typevar-rgx= # Naming style matching correct variable names. -variable-naming-style=camelCase +#variable-naming-style=camelCase # Regular expression matching correct variable names. Overrides variable- # naming-style. If left empty, variable names will be checked with the set @@ -574,7 +577,7 @@ max-returns=6 max-statements=50 # Minimum number of public methods for a class (see R0903). -min-public-methods=2 +min-public-methods=1 [EXCEPTIONS] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..b37d2bc --- /dev/null +++ b/LICENSE @@ -0,0 +1,301 @@ +# License + +--- Start of Definition of INTO-CPS Association Public License --- + +/* + +* This file is part of the INTO-CPS Association. 
+ +* Copyright (c) 2017-CurrentYear, INTO-CPS Association (ICA), +* c/o Peter Gorm Larsen, Aarhus University, Department of Engineering, +* Finlandsgade 22, 8200 Aarhus N, Denmark. + +* All rights reserved. + +* THIS PROGRAM IS PROVIDED UNDER THE TERMS OF GPL VERSION 3 LICENSE OR +* THIS INTO-CPS ASSOCIATION PUBLIC LICENSE (ICAPL) VERSION 1.0. +* ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS PROGRAM CONSTITUTES +* RECIPIENT'S ACCEPTANCE OF THE INTO-CPS ASSOCIATION PUBLIC LICENSE OR +* THE GPL VERSION 3, ACCORDING TO RECIPIENTS CHOICE. + +* The INTO-CPS tool suite software and the INTO-CPS Association +* Public License (ICAPL) are obtained from the INTO-CPS Association, either +* from the above address, from the URLs: or +* in the INTO-CPS tool suite distribution. +* GNU version 3 is obtained from: +* . + +* This program is distributed WITHOUT ANY WARRANTY; without +* even the implied warranty of MERCHANTABILITY or FITNESS +* FOR A PARTICULAR PURPOSE, EXCEPT AS EXPRESSLY SET FORTH +* IN THE BY RECIPIENT SELECTED SUBSIDIARY LICENSE CONDITIONS OF +* THE INTO-CPS ASSOCIATION PUBLIC LICENSE. + +* See the full ICAPL conditions for more details. + + */ + +--- End of INTO-CPS Association Public License Header --- + +The ICAPL is a public license for the INTO-CPS tool +suite with three modes/alternatives +(GPL, ICA-Internal-EPL, ICA-External-EPL) for use and redistribution, +in source and/or binary/object-code form: + +* GPL. Any party (member or non-member of the INTO-CPS Association) may use and + redistribute INTO-CPS tool suite under GPL version 3. + +* Silver Level members of the INTO-CPS Association may also use and redistribute + the INTO-CPS tool suite under ICA-Internal-EPL conditions. + +* Gold Level members of the INTO-CPS Association may also use and redistribute + The INTO-CPS tool suite under ICA-Internal-EPL or ICA-External-EPL conditions. + +Definitions of the INTO-CPS Association Public license modes: + +* GPL = GPL version 3. + +* ICA-Internal-EPL = These INTO-CPA Association Public + license conditions together with + Internally restricted EPL, i.e., EPL version 1.0 with the Additional Condition + that use and redistribution by a member of the INTO-CPS Association is only allowed + within the INTO-CPS Association member's own + organization (i.e., its own legal entity), + or for a member of the INTO-CPS Association + paying a membership fee corresponding to + the size of the organization including all its affiliates, use and redistribution + is allowed within/between its affiliates. + +* ICA-External-EPL = These INTO-CPA Association Public + license conditions together with + Externally restricted EPL, i.e., EPL version 1.0 with the Additional Condition + that use and redistribution by a member of the INTO-CPS Association, or by a Licensed + Third Party Distributor having a redistribution agreement with that member, + to parties external to the INTO-CPS Association + member’s own organization (i.e., its own + legal entity) is only allowed in binary/object-code form, except the case of + redistribution to other members the INTO-CPS Association to which source is also + allowed to be distributed. + +[This has the consequence that an external party who wishes to use +the INTO-CPS Association in source form together with +its own proprietary software in all +cases must be a member of the INTO-CPS Association]. 
+ +In all cases of usage and redistribution by recipients, the following +conditions also apply: + +a) Redistributions of source code must retain the above copyright notice, + all definitions, and conditions. It is sufficient if the ICAPL Header is + present in each source file, if the full ICAPL is available in a prominent + and easily located place in the redistribution. + +b) Redistributions in binary/object-code form must reproduce the above + copyright notice, all definitions, and conditions. It is sufficient if the + ICAPL Header and the location in the redistribution of the full ICAPL + are present in the documentation and/or other materials provided with the + redistribution, if the full ICAPL is available in a prominent and easily + located place in the redistribution. + +c) A recipient must clearly indicate its chosen usage mode of ICAPL, + in accompanying documentation and in a text file ICA-USAGE-MODE.txt, + provided with the distribution. + +d) Contributor(s) making a Contribution to the + INTO-CPS Association thereby also makes a + Transfer of Contribution Copyright. In return, upon the effective date of + the transfer, ICA grants the Contributor(s) a Contribution License of the + Contribution. ICA has the right to accept or refuse Contributions. + +Definitions: + +"Subsidiary license conditions" means: + +The additional license conditions depending on the by the recipient chosen +mode of ICAPL, defined by GPL version 3.0 for GPL, and by EPL for +ICA-Internal-EPL and ICA-External-EPL. + +"ICAPL" means: + +INTO-CPS Association Public License version 1.0, i.e., the license +defined here (the text between +"--- Start of Definition of INTO-CPS Association Public License ---" and +"--- End of Definition of INTO-CPS Association +Public License ---", or later versions thereof. + +"ICAPL Header" means: + +INTO-CPS Association Public License Header version 1.2, i.e., the +text between "--- Start of Definition +of INTO-CPS Association Public License ---" and +"--- End of INTO-CPS Association Public License Header ---, or later versions thereof. + +"Contribution" means: + +a) in the case of the initial Contributor, + the initial code and documentation + distributed under ICAPL, and + +b) in the case of each subsequent Contributor: + i) changes to the INTO-CPS tool suite, and + ii) additions to the INTO-CPS tool suite; + +where such changes and/or additions +to the INTO-CPS tool suite originate from and are +distributed by that particular Contributor. +A Contribution 'originates' from +a Contributor if it was added to the INTO-CPS +tool suite by such Contributor itself or +anyone acting on such Contributor's behalf. 
+ +For Contributors licensing the INTO-CPS tool suite under ICA-Internal-EPL or +ICA-External-EPL conditions, the following conditions also hold: + +Contributions do not include additions to the distributed Program which: (i) +are separate modules of software distributed +in conjunction with the INTO-CPS tool suite +under their own license agreement, (ii) are separate modules which are not +derivative works of the INTO-CPS tool suite, and (iii) are separate modules of software +distributed in conjunction with the INTO-CPS tool suite under their own license agreement +where these separate modules are merged with (weaved together with) modules of +The INTO-CPS tool suite to form new modules +that are distributed as object code or source +code under their own license agreement, as allowed under the Additional +Condition of internal distribution according to ICA-Internal-EPL and/or +Additional Condition for external distribution according to ICA-External-EPL. + +"Transfer of Contribution Copyright" means that the Contributors of a +Contribution transfer the ownership and the copyright of the Contribution to +the INTO-CPS Association, the INTO-CPS Association Copyright owner, for +inclusion in the INTO-CPS tool suite. +The transfer takes place upon the effective date +when the Contribution is made available on the +INTO-CPS Association web site under ICAPL, by +such Contributors themselves or anyone acting on such Contributors' behalf. +The transfer is free of charge. If the +Contributors or the INTO-CPS Association so wish, +an optional Copyright transfer agreement can be signed +between the INTO-CPS Association and the Contributors. + +"Contribution License" means a license from the INTO-CPS +Association to the Contributors of the Contribution, effective +on the date of the Transfer of Contribution Copyright, +where the INTO-CPS Association grants the Contributors a +non-exclusive, world-wide, transferable, free of charge, +perpetual license, including sublicensing rights, to use, +have used, modify, have modified, reproduce and or have reproduced the +contributed material, for business and other purposes, including but not +limited to evaluation, development, testing, integration and merging with +other software and distribution. The warranty and liability disclaimers of +ICAPL apply to this license. + +"Contributor" means any person or entity that distributes (part of) +the INTO-CPS tool chain. + +"The Program" means the Contributions distributed in accordance with ICAPL. + +"The INTO-CPS tool chain" means the Contributions +distributed in accordance with ICAPL. + +"Recipient" means anyone who receives the INTO-CPS tool chain under ICAPL, +including all Contributors. + +"Licensed Third Party Distributor" means a reseller/distributor having signed +a redistribution/resale agreement in accordance with ICAPL and the INTO-CPS +Association Bylaws, with a Gold Level organizational member which is not an +Affiliate of the reseller/distributor, for distributing a product containing +part(s) of the INTO-CPS tool suite. The Licensed Third Party Distributor shall +only be allowed further redistribution to other resellers if the Gold Level +member is granting such a right to it in the redistribution/resale agreement +between the Gold Level member and the Licensed Third Party Distributor. + +"Affiliate" shall mean any legal entity, directly or indirectly, through one +or more intermediaries, controlling or controlled by or under common control +with any other legal entity, as the case may be. 
For purposes of this +definition, the term "control" (including the terms "controlling," +"controlled by" and "under common control with") means the possession, +direct or indirect, of the power to direct or cause the direction of the +management and policies of a legal entity, whether through the ownership of +voting securities, by contract or otherwise. + +NO WARRANTY + +EXCEPT AS EXPRESSLY SET FORTH IN THE BY RECIPIENT SELECTED SUBSIDIARY +LICENSE CONDITIONS OF ICAPL, THE INTO-CPS ASSOCIATION IS PROVIDED ON AN "AS IS" +BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR +IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF +TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR +PURPOSE. Each Recipient is solely responsible for determining the +appropriateness of using and distributing the +INTO-CPS tool suite and assumes all risks +associated with its exercise of rights under ICAPL , including but not +limited to the risks and costs of program errors, compliance with applicable +laws, damage to or loss of data, programs or equipment, and unavailability +or interruption of operations. + +DISCLAIMER OF LIABILITY + +EXCEPT AS EXPRESSLY SET FORTH IN THE BY RECIPIENT SELECTED SUBSIDIARY +LICENSE CONDITIONS OF ICAPL, NEITHER RECIPIENT NOR ANY CONTRIBUTORS +SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION +LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE INTO-CPS TOOL +SUITE OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED +OF THE POSSIBILITY OF SUCH DAMAGES. + +A Contributor licensing the INTO-CPS tool suite under ICA-Internal-EPL or +ICA-External-EPL may choose to distribute (parts of) the INTO-CPS tool suite +in object code form under its own license agreement, provided that: + +a) it complies with the terms and conditions of ICAPL; or for the case of +redistribution of the INTO-CPS tool suite +together with proprietary code it is a dual +license where the INTO-CPS tool suite parts are distributed under ICAPL compatible +conditions and the proprietary code is distributed under proprietary license +conditions; and + +b) its license agreement: + i) effectively disclaims on behalf of all Contributors all warranties and +conditions, express and implied, including warranties or conditions of title +and non-infringement, and implied warranties or conditions of merchantability +and fitness for a particular purpose; + ii) effectively excludes on behalf of all Contributors all liability for +damages, including direct, indirect, special, incidental and consequential +damages, such as lost profits; + iii) states that any provisions which differ + from ICAPL are offered by that +Contributor alone and not by any other party; and + iv) states from where the source code + for the INTO-CPS tool suite is available, and +informs licensees how to obtain it in a reasonable manner on or through a +medium customarily used for software exchange. + +When the INTO-CPS tool suite is made available in source code form: + + a) it must be made available under ICAPL; and + + b) a copy of ICAPL must be included with each copy of the INTO-CPS tool suite. + + c) a copy of the subsidiary license associated with the selected mode of +ICAPL must be included with each copy of the INTO-CPS tool suite. 
+ +Contributors may not remove or alter any copyright notices contained within +The INTO-CPS tool suite. + +If there is a conflict between ICAPL and the subsidiary license conditions, +ICAPL has priority. + +This Agreement is governed by the laws of Denmark. The place of jurisdiction +for all disagreements related to this Agreement, is Aarhus, Denmark. + +The EPL 1.0 license definition has been obtained from: +. +It is also reproduced in the INTO-CPS distribution. + +The GPL Version 3 license definition has been obtained from +. +It is also reproduced in the INTO-CPS distribution. + +--- End of Definition of INTO-CPS Association Public License --- \ No newline at end of file diff --git a/README.md b/README.md index 80415fe..a739214 100644 --- a/README.md +++ b/README.md @@ -17,11 +17,11 @@ source .venv/bin/activate # On Linux pip install poetry #Specifically install poetry to your system # If you have poetry installed globally poetry env activate # shows the command to activate venv -pylint src --rcfile=../.pylintrc # runs linting checks - poetry install # installs all required python packages +pylint src tests --rcfile=.pylintrc # runs linting checks + poetry build # builds cp-sens package that can be published on pip -poetry run start # runs the main script +poetry run experiment_1 # run one experiment with real data ``` ## Testing @@ -32,13 +32,3 @@ _test_*_.py_. To run all tests, with coverage: ```bash pytest ``` - -## Use - -Only MQTT client code is working at the moment. -You can use it by setting the `src/cp-sens/data/config/mqtt.json` -and executing, - -```bash -python .\src\cp-sens\data\sources\mqtt.py -``` diff --git a/src/cp-sens/data/__init__.py b/config/__init__.py similarity index 100% rename from src/cp-sens/data/__init__.py rename to config/__init__.py diff --git a/config/production.json.template b/config/production.json.template new file mode 100644 index 0000000..24f96f8 --- /dev/null +++ b/config/production.json.template @@ -0,0 +1,18 @@ +{ + "MQTT": { + "host": "dtl-server-2.st.lab.au.dk", + "port": 8090, + "userId": "NEEDED", + "password": "NEEDED", + "ClientID": "test_client_id", + "QoS": 1, + "TopicsToSubscribe": [ + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/1/acc/raw/data", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/1/acc/raw/metadata", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/2/acc/raw/data", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/2/acc/raw/metadata", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/3/acc/raw/data", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/3/acc/raw/metadata" + ] + } +} diff --git a/config/r-pi.json.template b/config/r-pi.json.template new file mode 100644 index 0000000..ce88ebb --- /dev/null +++ b/config/r-pi.json.template @@ -0,0 +1,11 @@ +{ + "MQTT": { + "host": "test.mosquitto.org", + "port": 1883, + "userId": "", + "password": "", + "ClientID": "test_client_id", + "QoS": 1, + "TopicsToSubscribe": ["cpsens/DAQ_ID/MODULE_ID/Sensor1/acc/raw/data"] + } +} diff --git a/src/cp-sens/config/mqtt.json b/config/test.json.template similarity index 65% rename from src/cp-sens/config/mqtt.json rename to config/test.json.template index 4e08974..8177337 100644 --- a/src/cp-sens/config/mqtt.json +++ b/config/test.json.template @@ -1,11 +1,11 @@ { "MQTT": { "host": "test.mosquitto.org", - "port": 1883, + "port": 1883, "userId": "", "password": "", "ClientID": "test_client_id", "QoS": 1, - "TopicsToSubscribe": ["topic"] + "TopicsToSubscribe": ["topicAA", "topicBB", "topicCC"] } } diff --git a/poetry.lock b/poetry.lock index 392b587..e6bea62 100644 --- 
a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,17 @@ # This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "astroid" version = "3.3.8" @@ -12,18 +24,21 @@ files = [ {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] -markers = "sys_platform == \"win32\"" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} [[package]] name = "contourpy" @@ -172,6 +187,9 @@ files = [ {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] @@ -207,6 +225,22 @@ files = [ graph = ["objgraph (>=1.7.2)"] profile = ["gprof2dot (>=2022.7.29)"] +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "fonttools" version = "4.55.3" @@ -560,6 +594,93 @@ files = [ [package.extras] proxy = ["pysocks"] +[[package]] +name = "pandas" +version = "2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = 
"pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = 
"sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] 
+consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pillow" version = "11.0.0" @@ -686,6 +807,140 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pydantic" +version = "2.10.6" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", 
hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = 
"sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = 
"pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pylint" version = "3.3.3" @@ -702,18 +957,34 @@ files = [ astroid = ">=3.3.8,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, {version = ">=0.3.6", markers = "python_version == \"3.11\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} tomlkit = ">=0.10.1" [package.extras] spelling = ["pyenchant (>=3.2,<4.0)"] testutils = ["gitpython (>3)"] + + +[package.dependencies] +matplotlib = ">=3.7.4" +numpy = {version = ">=1.25", markers = "python_version >= \"3.9\""} +pandas = ">=2.0.3" +pydantic = ">=2.5.1" +scipy = ">=1.9.3" +tqdm = ">=4.66.1" + +[package.extras] +openpyxl = ["openpyxl (>=3.1.3)"] +pyvista = ["PyQt5 (==5.15.10)", "PyQt5-Qt5 (==5.15.14) ; sys_platform == \"darwin\"", "PyQt5-Qt5 (==5.15.2) ; sys_platform != \"darwin\"", "PyQt5-sip (==12.15.0)", "pyvista[all]", "pyvistaqt", "vtk (==9.3.1) ; python_version != \"3.8\" or sys_platform != \"darwin\""] + [[package]] name = "pyparsing" version = "3.2.0" @@ -743,9 +1014,11 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -784,6 +1057,82 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pytz" +version = "2025.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, +] + +[[package]] +name = "scipy" +version = "1.15.2" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9"}, + {file = "scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5"}, + {file = "scipy-1.15.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:ecf797d2d798cf7c838c6d98321061eb3e72a74710e6c40540f0e8087e3b499e"}, + {file = "scipy-1.15.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = 
"sha256:9b18aa747da280664642997e65aab1dd19d0c3d17068a04b3fe34e2559196cb9"}, + {file = "scipy-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87994da02e73549dfecaed9e09a4f9d58a045a053865679aeb8d6d43747d4df3"}, + {file = "scipy-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69ea6e56d00977f355c0f84eba69877b6df084516c602d93a33812aa04d90a3d"}, + {file = "scipy-1.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:888307125ea0c4466287191e5606a2c910963405ce9671448ff9c81c53f85f58"}, + {file = "scipy-1.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9412f5e408b397ff5641080ed1e798623dbe1ec0d78e72c9eca8992976fa65aa"}, + {file = "scipy-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:b5e025e903b4f166ea03b109bb241355b9c42c279ea694d8864d033727205e65"}, + {file = "scipy-1.15.2-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:92233b2df6938147be6fa8824b8136f29a18f016ecde986666be5f4d686a91a4"}, + {file = "scipy-1.15.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:62ca1ff3eb513e09ed17a5736929429189adf16d2d740f44e53270cc800ecff1"}, + {file = "scipy-1.15.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:4c6676490ad76d1c2894d77f976144b41bd1a4052107902238047fb6a473e971"}, + {file = "scipy-1.15.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8bf5cb4a25046ac61d38f8d3c3426ec11ebc350246a4642f2f315fe95bda655"}, + {file = "scipy-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a8e34cf4c188b6dd004654f88586d78f95639e48a25dfae9c5e34a6dc34547e"}, + {file = "scipy-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28a0d2c2075946346e4408b211240764759e0fabaeb08d871639b5f3b1aca8a0"}, + {file = "scipy-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:42dabaaa798e987c425ed76062794e93a243be8f0f20fff6e7a89f4d61cb3d40"}, + {file = "scipy-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6f5e296ec63c5da6ba6fa0343ea73fd51b8b3e1a300b0a8cae3ed4b1122c7462"}, + {file = "scipy-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:597a0c7008b21c035831c39927406c6181bcf8f60a73f36219b69d010aa04737"}, + {file = "scipy-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c4697a10da8f8765bb7c83e24a470da5797e37041edfd77fd95ba3811a47c4fd"}, + {file = "scipy-1.15.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:869269b767d5ee7ea6991ed7e22b3ca1f22de73ab9a49c44bad338b725603301"}, + {file = "scipy-1.15.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bad78d580270a4d32470563ea86c6590b465cb98f83d760ff5b0990cb5518a93"}, + {file = "scipy-1.15.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b09ae80010f52efddb15551025f9016c910296cf70adbf03ce2a8704f3a5ad20"}, + {file = "scipy-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6fd6eac1ce74a9f77a7fc724080d507c5812d61e72bd5e4c489b042455865e"}, + {file = "scipy-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b871df1fe1a3ba85d90e22742b93584f8d2b8e6124f8372ab15c71b73e428b8"}, + {file = "scipy-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:03205d57a28e18dfd39f0377d5002725bf1f19a46f444108c29bdb246b6c8a11"}, + {file = "scipy-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:601881dfb761311045b03114c5fe718a12634e5608c3b403737ae463c9885d53"}, + {file = "scipy-1.15.2-cp312-cp312-win_amd64.whl", hash = "sha256:e7c68b6a43259ba0aab737237876e5c2c549a031ddb7abc28c7b47f22e202ded"}, + {file = 
"scipy-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01edfac9f0798ad6b46d9c4c9ca0e0ad23dbf0b1eb70e96adb9fa7f525eff0bf"}, + {file = "scipy-1.15.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:08b57a9336b8e79b305a143c3655cc5bdbe6d5ece3378578888d2afbb51c4e37"}, + {file = "scipy-1.15.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:54c462098484e7466362a9f1672d20888f724911a74c22ae35b61f9c5919183d"}, + {file = "scipy-1.15.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:cf72ff559a53a6a6d77bd8eefd12a17995ffa44ad86c77a5df96f533d4e6c6bb"}, + {file = "scipy-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de9d1416b3d9e7df9923ab23cd2fe714244af10b763975bea9e4f2e81cebd27"}, + {file = "scipy-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb530e4794fc8ea76a4a21ccb67dea33e5e0e60f07fc38a49e821e1eae3b71a0"}, + {file = "scipy-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5ea7ed46d437fc52350b028b1d44e002646e28f3e8ddc714011aaf87330f2f32"}, + {file = "scipy-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11e7ad32cf184b74380f43d3c0a706f49358b904fa7d5345f16ddf993609184d"}, + {file = "scipy-1.15.2-cp313-cp313-win_amd64.whl", hash = "sha256:a5080a79dfb9b78b768cebf3c9dcbc7b665c5875793569f48bf0e2b1d7f68f6f"}, + {file = "scipy-1.15.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:447ce30cee6a9d5d1379087c9e474628dab3db4a67484be1b7dc3196bfb2fac9"}, + {file = "scipy-1.15.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c90ebe8aaa4397eaefa8455a8182b164a6cc1d59ad53f79943f266d99f68687f"}, + {file = "scipy-1.15.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:def751dd08243934c884a3221156d63e15234a3155cf25978b0a668409d45eb6"}, + {file = "scipy-1.15.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:302093e7dfb120e55515936cb55618ee0b895f8bcaf18ff81eca086c17bd80af"}, + {file = "scipy-1.15.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd5b77413e1855351cdde594eca99c1f4a588c2d63711388b6a1f1c01f62274"}, + {file = "scipy-1.15.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d0194c37037707b2afa7a2f2a924cf7bac3dc292d51b6a925e5fcb89bc5c776"}, + {file = "scipy-1.15.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:bae43364d600fdc3ac327db99659dcb79e6e7ecd279a75fe1266669d9a652828"}, + {file = "scipy-1.15.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f031846580d9acccd0044efd1a90e6f4df3a6e12b4b6bd694a7bc03a89892b28"}, + {file = "scipy-1.15.2-cp313-cp313t-win_amd64.whl", hash = "sha256:fe8a9eb875d430d81755472c5ba75e84acc980e4a8f6204d402849234d3017db"}, + {file = "scipy-1.15.2.tar.gz", hash = "sha256:cd58a314d92838f7e6f755c8a2167ead4f27e1fd5c1251fd54289569ef3495ec"}, +] + +[package.dependencies] +numpy = ">=1.23.5,<2.5" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] +doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", 
"threadpoolctl"] + [[package]] name = "six" version = "1.17.0" @@ -796,6 +1145,49 @@ files = [ {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + [[package]] name = "tomlkit" version = "0.13.2" @@ -808,7 +1200,54 @@ files = [ {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] +[[package]] +name = "tqdm" +version = "4.67.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] +markers = {dev = "python_version < \"3.11\""} + +[[package]] +name = "tzdata" +version = "2025.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +files = [ + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = 
"sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, +] + [metadata] lock-version = "2.1" -python-versions = "^3.11" -content-hash = "624ccebfb71fd020a3379f1770fa9683f59e9887380c9c1fad27c586735bc4be" +python-versions = ">=3.10, <3.13" +content-hash = "c8abbc2048e9c6e5526487d2d2f0d2b379e5cfc9fe5d12767c52770c296e6796" diff --git a/pyproject.toml b/pyproject.toml index c04a576..4d8cae5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,14 +1,17 @@ [tool.poetry] -name = "cp-sens" +name = "example-shm" version = "0.1.0" description = "" -authors = ["prasadtalasila "] +authors = [ + "Mohamed Abdulkarim <202206332@post.au.dk>", + "Prasad Talasila " +] readme = "README.md" license = "INTO-CPS Association" packages = [{include = "*", from="src"}] [tool.poetry.dependencies] -python = "^3.11" +python = ">=3.10, <3.13" paho-mqtt = "^2.1.0" matplotlib = "^3.10.0" @@ -22,4 +25,4 @@ requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" [tool.poetry.scripts] -start = "src.main:main" +experiment_1 = "src.experiment_1:main" \ No newline at end of file diff --git a/pytest.ini b/pytest.ini index 1a38571..0171d98 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,6 +1,6 @@ [pytest] minversion = 8.3 -pythonpath = src/cp-sens tests/cp-sens +pythonpath = src src/data src/methods tests testpaths = tests addopts = --cov=src --cov-report=term-missing --cov-report=html diff --git a/src/cp-sens/data/accel/hbk/METADATA.md b/src/cp-sens/data/accel/hbk/METADATA.md deleted file mode 100644 index da79800..0000000 --- a/src/cp-sens/data/accel/hbk/METADATA.md +++ /dev/null @@ -1,76 +0,0 @@ -# METADATA Documentation - -## Overview - -This document describes the structure and handling of the metadata and data topics used in the MQTT communication system for accelerometer sensors. It explains the format of the payloads, metadata versioning, data consistency checks, and handling sensor-specific metadata. - -The MQTT system uses hierarchical topics to identify the source and type of data. The two key topics are: - -1. **Data Topic**: Contains dynamic data from sensors. -1. **Metadata Topic**: Contains static information, including metadata related to the sensor, analysis chain, and engineering information. - -## MQTT Topic Format - -The MQTT topic structure follows the pattern: - -```txt -cpsens/DAQ_ID/MODULE_ID/CH_ID/PHYSICS/ANALYSIS/DATA_ID -``` - -### Example Topics - -#### Data Topic - -```txt -cpsens/RPi_1234/1/1/acc/raw/data -``` - -This represents data coming from an accelerometer (`acc`), processed as raw data (`raw`), from channel 1 of module 1 on device `RPi_1234`. - -#### Metadata Topic - -```txt -cpsens/RPi_1234/1/1/acc/raw/metadata -``` - -This topic contains metadata for the raw data from the same device, module, and channel. - -## Payload Format - -### Data Topic Payload - -The **data topic** payload consists of two parts: - -1. **Descriptor**: Contains dynamic metadata related to the data. -1. **Data**: Contains the actual sensor readings (typically as binary data). 
- -#### Example Data Topic Payload - -```json -{ -"descriptor": { - "length": 10, - "timestamp": "1638493434", - "metadata_version": 1 -}, -"data": { - "type": "double", - "values": [0.5, 0.3, 0.7] -}, -"sensor": { - "sensing": "acceleration", - "sensitivity": 100, - "unit": "mV/ms-2" -}, -"DAQ_device": { - "IP_address": "192.168.100.101", - "type": "Raspberry PI" -}, -"analysis_chain": { - "analysis1": { - "name": "raw", - "sampling_rate_Sa_per_s": 100 - } -} -} -``` diff --git a/src/cp-sens/data/accel/senseHAT.py b/src/cp-sens/data/accel/senseHAT.py deleted file mode 100644 index fa85853..0000000 --- a/src/cp-sens/data/accel/senseHAT.py +++ /dev/null @@ -1,26 +0,0 @@ -from datetime import datetime -from sense_hat import SenseHat -from .accelerometer import IAccelerometer, us_multiplier - -""" -Collects one reading from STM LSM9DS1 IMU -available on the senseHAT - -TODO: This class works correctly only if it is -1) Raspberry Pi with senseHAT installed on it -2) used in non virtual environments (no venv) -""" -class senseHAT(IAccelerometer): - sense = SenseHat() - - def read(self) -> dict: - accel = self.sense.get_accelerometer_raw() - # represents time at resolution of a microsecond - # but depends on the underlying clock - timestamp = datetime.timestamp(datetime.now()) - key = round(timestamp*us_multiplier) - sample = { - 'timestamp': timestamp, - 'acceleration': accel - } - return sample diff --git a/src/cp-sens/data/sources/mqtt.py b/src/cp-sens/data/sources/mqtt.py deleted file mode 100644 index 1166e8a..0000000 --- a/src/cp-sens/data/sources/mqtt.py +++ /dev/null @@ -1,82 +0,0 @@ -import json -import os -import time -from paho.mqtt.client import Client as MQTTClient, CallbackAPIVersion, MQTTv5 # type: ignore - -def load_config(config_path: str) -> dict: - """ - Loads JSON configuration from the provided config path. - - Raises: - FileNotFoundError: If the file is not found. - ValueError: If the file cannot be decoded as JSON. - Exception: For any other unexpected error. 
- """ - try: - with open(config_path, "r") as f: - json_config = json.load(f) - print("JSON configuration loaded successfully.") - return json_config - except FileNotFoundError: - raise FileNotFoundError(f"Error: The file {config_path} was not found.") - except json.JSONDecodeError: - raise ValueError(f"Error: The file {config_path} could not be decoded as JSON.") - except Exception as e: - raise Exception(f"An unexpected error occurred: {e}") - - -def create_on_connect_callback(topics, qos): - def on_connect(client, userdata, flags, rc, properties=None): - print(f"on_connect: Connected with response code {rc}") - if rc == 0: # Connection was successful - for topic in topics: - print(f"Subscribing to the topic {topic}...") - client.subscribe(topic, qos=qos) - else: - print("Connection failed with result code:", rc) - return on_connect - -def create_on_subscribe_callback(): - def on_subscribe(client, userdata, mid, granted_qos, properties=None): - print(f"on_subscribe: Subscription ID {mid} with QoS levels {granted_qos}") - return on_subscribe - -def create_on_message_callback(): - def on_message(client, userdata, msg): - print(f"on_message: Received message on {msg.topic}") - #print(f"Message payload: {msg.payload.decode()}") - return on_message - -def create_on_publish_callback(): - def on_publish(client, userdata, mid): - print(f"on_publish: Message {mid} published.") - return on_publish - -def setup_mqtt_client(config): - mqttc = MQTTClient( - client_id=config["MQTT"]["ClientID"], - callback_api_version=CallbackAPIVersion.VERSION2, - protocol=MQTTv5 - ) - if config["MQTT"]["userId"]: - mqttc.username_pw_set(config["MQTT"]["userId"], config["MQTT"]["password"]) - - # Assign callbacks. - mqttc.on_connect = create_on_connect_callback(config["MQTT"]["TopicsToSubscribe"], - config["MQTT"]["QoS"]) - mqttc.on_subscribe = create_on_subscribe_callback() - mqttc.on_message = create_on_message_callback() - mqttc.on_publish = create_on_publish_callback() - return mqttc - -def main() -> None: - current_dir = os.path.dirname(os.path.abspath(__file__)) - config_path = os.path.join(current_dir, "../../config/mqtt.json") - json_config = load_config(config_path) - mqttc = setup_mqtt_client(json_config) - mqttc.connect(json_config["MQTT"]["host"], json_config["MQTT"]["port"], 60) - mqttc.loop_start() - time.sleep(10) - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/src/cp-sens/experiment_1.py b/src/cp-sens/experiment_1.py deleted file mode 100644 index 4d0951b..0000000 --- a/src/cp-sens/experiment_1.py +++ /dev/null @@ -1,12 +0,0 @@ -from data.accel.accelerometer import IAccelerometer -from data.accel.random import RandomSource -from data.accel.senseHAT import senseHAT - -def main(): - random: IAccelerometer = RandomSource() - print(random.read()) - sensehat: IAccelerometer = senseHAT() - print(sensehat.read()) - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/src/cp-sens/data/sources/__init__.py b/src/data/__init__.py similarity index 100% rename from src/cp-sens/data/sources/__init__.py rename to src/data/__init__.py diff --git a/src/cp-sens/data/accel/hbk/.gitkeep b/src/data/accel/__init__.py similarity index 100% rename from src/cp-sens/data/accel/hbk/.gitkeep rename to src/data/accel/__init__.py diff --git a/src/cp-sens/data/accel/accelerometer.py b/src/data/accel/accelerometer.py similarity index 62% rename from src/cp-sens/data/accel/accelerometer.py rename to src/data/accel/accelerometer.py index 51d49e1..a48a3fa 100644 --- 
a/src/cp-sens/data/accel/accelerometer.py +++ b/src/data/accel/accelerometer.py @@ -1,17 +1,18 @@ import abc +import numpy as np + +US_MULTIPLIER = 1000000 # factor to convert time to microseconds -# multiplier to convert time from -us_multiplier = 1000000 # factor to convert time to microseconds class IAccelerometer(abc.ABC): @abc.abstractmethod - def read() -> dict: + def read(self, requested_samples: int) -> (int, np.ndarray): # type: ignore """ This method provides a single accelerometer reading. A sample reading is: sample = { 'timestamp': 0, - 'accel': { + 'accel_readings': { 'x': 0, 'y': 0, 'z': 0 @@ -19,4 +20,3 @@ def read() -> dict: } """ pass - diff --git a/src/data/accel/constants.py b/src/data/accel/constants.py new file mode 100644 index 0000000..dba40e2 --- /dev/null +++ b/src/data/accel/constants.py @@ -0,0 +1,7 @@ +MAX_MAP_SIZE = 52200 # The maximum number of samples saved in FIFO + +TIMEOUT = 2 # Maximum time (in seconds) to wait until enough samples are collected in test_Accelerometer + +INTERVAL = 0.001 # Check every 0.001s to see if samples are collected + +MIN_SAMPLES_NEEDED = 500 # Minimum number of samples needed before passing data to sysid diff --git a/src/data/accel/hbk/.gitkeep b/src/data/accel/hbk/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/src/data/accel/hbk/Accelerometer.py b/src/data/accel/hbk/Accelerometer.py new file mode 100644 index 0000000..3cd12bd --- /dev/null +++ b/src/data/accel/hbk/Accelerometer.py @@ -0,0 +1,131 @@ +import threading +import struct +from collections import deque +import numpy as np + +# Project Imports +from data.accel.accelerometer import IAccelerometer +from data.accel.constants import MAX_MAP_SIZE + +class Accelerometer(IAccelerometer): + def __init__( + self, + mqtt_client, + topic: str = "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/1/acc/raw/data", + map_size: int = MAX_MAP_SIZE ): + + + """ + Initializes the Accelerometer instance with a pre-configured MQTT client. + + Parameters: + mqtt_client: A pre-configured and connected MQTT client. + topic (str): The MQTT topic to subscribe to. Defaults to the simulator's channel 1 raw-data topic. + map_size (int): The maximum number of samples to store in the map. + """ + self.mqtt_client = mqtt_client + + self.topic = topic + self._map_size = map_size + self.data_map = {} + self._lock = threading.Lock() + + # Setting up MQTT callback + self.mqtt_client.on_message = self._on_message + + def _on_message(self, _, __, msg): + """Handles incoming MQTT messages.""" + print(f"Received message on topic {msg.topic}") + + def safe_process(): # This ensures that an exception does not crash the entire thread + try: + self._process_message(msg) + except Exception as e: + print(f"Error processing message: {e}") + + threading.Thread(target=safe_process, daemon=True).start() + + + def _process_message(self, msg): + """ + Processes incoming MQTT messages, extracts accelerometer data, + and stores it in a dictionary of FIFO queues. + + - Each unique `samples_from_daq_start` gets its own `deque`. + - If the total number of stored samples exceeds `_map_size`, + the oldest keys (oldest data batches) are removed.
+ """ + try: + raw_payload = msg.payload + + # Extract metadata + # We know that the first 2 bytes tells the length of the descriptor + descriptor_length = struct.unpack(" self._map_size: + oldest_key = min(self.data_map.keys()) # Find the oldest batch + del self.data_map[oldest_key] # Remove oldest batch + print(f" Channel: {self.topic} Key: {samples_from_daq_start}, Samples: {num_samples}") + + except Exception as e: + print(f"Error processing message: {e}") + + + def read(self, requested_samples: int) -> (int, np.ndarray): + """ + Reads the oldest accelerometer data from the FIFO buffer and removes only the read samples. + + Parameters: + requested_samples (int): The number of samples desired. + + Returns: + Tuple[int, np.ndarray]: + - status: 1 if the number of samples returned equals the requested number, + 0 if fewer samples were available. + - data: A NumPy array of shape (n_samples,). + """ + with self._lock: + sorted_keys = sorted(self.data_map.keys()) + + samples = [] + samples_collected = 0 + + for key in sorted_keys: + entry = self.data_map[key] # Access the deque directly + + if samples_collected + len(entry) <= requested_samples: + # Take the whole entry and remove it + samples.extend(entry) + samples_collected += len(entry) + del self.data_map[key] + else: + # Take only the required number of samples + remaining_samples = requested_samples - samples_collected + # Using list here because we need to slice it in order to only take what we need + samples.extend(list(entry)[:remaining_samples]) + for _ in range(remaining_samples): + entry.popleft() # Remove samples from deque + samples_collected += remaining_samples + break # Stop once we have enough samples + + samples = np.array(samples, dtype=np.float64) + status = 1 if samples_collected == requested_samples else 0 + + return status, samples + + + def acquire_lock(self)->(threading.Lock): + return self._lock diff --git a/src/data/accel/hbk/METADATA.md b/src/data/accel/hbk/METADATA.md new file mode 100644 index 0000000..4ea87f5 --- /dev/null +++ b/src/data/accel/hbk/METADATA.md @@ -0,0 +1,142 @@ +# METADATA Documentation + +## Overview + +This document describes the structure and handling of the metadata and data topics used in the MQTT communication system for accelerometer sensors. It explains the format of the payloads, metadata versioning, data consistency checks, and handling sensor-specific metadata. + +The MQTT system uses hierarchical topics to identify the source and type of data. The two key topics are: + +1. **Data Topic**: Contains dynamic data from sensors. +1. **Metadata Topic**: Contains static information, including metadata related to the sensor, analysis chain, and engineering information. + +## MQTT Topic Format + +The MQTT topic structure follows the pattern: + +```txt +cpsens/DAQ_ID/MODULE_ID/CH_ID/PHYSICS/ANALYSIS/DATA_ID +``` + +### Example Topics + +#### Metadata Topic + +```txt +cpsens/d3-f2-f3-b3/cpsns_Simulator/1/acc/raw/metadata +``` + +This represents metadata coming from an accelerometer (`acc`), processed as raw data (`raw`), from channel 1 of The cpsns_Simulator module on device `d3-f2-f3-b`. 
+ +### METADATA Topic Payload + +#### Example METADATA Topic Payload + +```json +{ + "Descriptor": { + "Descriptor length": "uint16", + "Metadata version": "uint16", + "Seconds since epoch": "uint64", + "Nanoseconds": "uint64", + "Samples from DAQ start": "uint64" + }, + "Data": { + "Type": "float", + "Samples": 32, + "Unit": "m/s^2" + }, + "Sensor": { + "Sensing": "acceleration", + "Sensitivity": 100.0, + "Sensitivity unit": "mV/(m/s^2)", + "Vendor": "HBK", + "Type": "4507 B", + "S/N": "12345" + }, + "DAQ": { + "Type": "DAQ_Simulator", + "MAC": "a1-a2-a3-a4", + "IP": "" + }, + "Analysis chain": [ + { + "Name": "acquisition", + "Output": "raw", + "Sampling": 512.0 + } + ], + "Engineering": { + "project": "name", + "projectid": 42, + "channelgroupname": "Blade", + "channelgroupid": 1, + "channelName": "SOO", + "DOF": 156149, + "Node": 156149, + "Dir": 1 + }, + "TimeAtAquisitionStart": { + "Seconds": 1741618465, + "Nanosec": 641669098 + } +} +``` + +#### Data Topic + +```txt +cpsens/d3-f2-f3-b/cpsns_Simulator/1/acc/raw/data +``` + +This topic contains data for the raw data from the same device, module, and channel. + + +### Data Topic Payload + +The **data topic** payload consists of two parts: + +1. **Descriptor**: Contains dynamic metadata related to the data. +1. **Data**: Contains the actual sensor readings (typically as binary data). + +#### Example Data Topic Payload + +```json +{ + "descriptor": { + "descriptor_length": 28, + "metadata_version": 2, + "seconds_since_epoch": 1742400339, + "nanoseconds": 1504491492025, + "samples_from_daq_start": 400319264 + }, + "data": { + "values": [3.5, 4.3, 4.7] + } +} +``` + +## MQTT Configuration + +The acceleration measurements are streamed via MQTT broker. The following +configuration needs to be placed in `config/mqtt.json` and +credentials modified. 
+ +```json +{ + "MQTT": { + "host": "test.mosquitto.org", + "port": 1883, + "userId": "", + "password": "", + "ClientID": "test_client_id", + "QoS": 1, + "TopicsToSubscribe": [ + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/0/acc/raw/data", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/0/acc/raw/metadata", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/1/acc/raw/data", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/1/acc/raw/metadata", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/2/acc/raw/data", + "cpsens/d8-3a-dd-f5-92-48/cpsns_Simulator/2/acc/raw/metadata" + ] + } +} diff --git a/src/data/accel/hbk/__init__.py b/src/data/accel/hbk/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/cp-sens/data/accel/random.py b/src/data/accel/random.py similarity index 64% rename from src/cp-sens/data/accel/random.py rename to src/data/accel/random.py index f12c3e6..de13600 100644 --- a/src/cp-sens/data/accel/random.py +++ b/src/data/accel/random.py @@ -1,25 +1,28 @@ from datetime import datetime import random -from .accelerometer import IAccelerometer, us_multiplier +from .accelerometer import IAccelerometer, US_MULTIPLIER """ A dummy accelerometer that generates random (x,y,z) values in the [-1,1] range """ + + class RandomSource(IAccelerometer): def read(self) -> dict: accel = { - 'x': random.uniform(-1, 1), - 'y': random.uniform(-1, 1), - 'z': random.uniform(-1, 1) + 'x': random.uniform(-1, 1), + 'y': random.uniform(-1, 1), + 'z': random.uniform(-1, 1) } # represents time at resolution of a microsecond # but depends on the underlying clock timestamp = datetime.timestamp(datetime.now()) - key = round(timestamp*us_multiplier) + # pylint: disable=unused-variable + key = round(timestamp * US_MULTIPLIER) sample = { 'timestamp': timestamp, 'acceleration': accel - } + } return sample diff --git a/src/data/sources/__init__.py b/src/data/sources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/data/sources/mqtt.py b/src/data/sources/mqtt.py new file mode 100644 index 0000000..067276b --- /dev/null +++ b/src/data/sources/mqtt.py @@ -0,0 +1,122 @@ +""" +MQTT Client Setup and Utility Functions. + +This module provides functions to set up an MQTT client, handle connections, +subscriptions, and message publishing using the Paho MQTT library. +""" + +import json +from paho.mqtt.client import Client as MQTTClient, CallbackAPIVersion, MQTTv5 # type: ignore + + +def load_config(config_path: str) -> dict: + """ + Loads JSON configuration from the provided config path. + + Args: + config_path (str): Path to the JSON configuration file. + + Returns: + dict: The loaded configuration. + + Raises: + FileNotFoundError: If the file is not found. + ValueError: If the file cannot be decoded as JSON. + RuntimeError: For any other unexpected error. 
+ """ + try: + with open(config_path, "r", encoding="utf-8") as file: + json_config = json.load(file) + print("JSON configuration loaded successfully.") + return json_config + except FileNotFoundError as exc: + raise FileNotFoundError( + f"Error: The file {config_path} was not found.") from exc + except json.JSONDecodeError as exc: + raise ValueError( + f"Error: The file {config_path} could not be decoded as JSON.") from exc + except Exception as exc: + raise RuntimeError(f"An unexpected error occurred: {exc}") from exc + + +def create_on_connect_callback(topics, qos): + """Creates an on_connect callback function for the MQTT client.""" + + # pylint: disable=unused-argument + def on_connect(client, _, __, rc, properties=None): # noqa: ARG001 + print(f"on_connect: Connected with response code {rc}") + if rc == 0: # Connection was successful + for topic in topics: + print(f"Subscribing to topic: {topic}") + client.subscribe(topic, qos=qos) + else: + print("Connection failed with result code:", rc) + + return on_connect + + +def create_on_subscribe_callback(): + """Creates an on_subscribe callback function for the MQTT client.""" + + # pylint: disable=unused-argument + def on_subscribe(_, __, mid, granted_qos, properties=None): # noqa: ARG001 + print( + f"on_subscribe: Subscription ID {mid} with QoS levels {granted_qos}") + + return on_subscribe + + +def create_on_message_callback(): + """Creates an on_message callback function for the MQTT client.""" + + def on_message(_, __, msg): # noqa: ARG001 + print(f"on_message: Received message on {msg.topic}") + + return on_message + + +def create_on_publish_callback(): + """Creates an on_publish callback function for the MQTT client.""" + + # pylint: disable=unused-argument + def on_publish(_, __, mid, *args, **kwargs): # noqa: ARG001 + print(f"on_publish: Message {mid} published.") + + return on_publish + + +def setup_mqtt_client(config, topic_index=0): + """ + Initializes an MQTT client using a specific topic index from the subscription list. + + Args: + config (dict): MQTT client configuration. + topic_index (int, optional): Index of the topic to subscribe to. Defaults to 0. + + Returns: + tuple: (MQTTClient, selected_topic) + """ + mqttc = MQTTClient( + client_id=config["ClientID"], + callback_api_version=CallbackAPIVersion.VERSION2, + protocol=MQTTv5, + ) + + if config["userId"]: + mqttc.username_pw_set(config["userId"], config["password"]) + + topics_list = config["TopicsToSubscribe"] + if topic_index < 0 or topic_index >= len(topics_list): + raise ValueError( + f"Invalid topic index: {topic_index}. 
Available range: 0-{len(topics_list) - 1}" + ) + + selected_topic = topics_list[topic_index] + + mqttc.on_connect = create_on_connect_callback( + [selected_topic], config["QoS"]) + mqttc.on_subscribe = create_on_subscribe_callback() + mqttc.on_message = create_on_message_callback() + mqttc.on_publish = create_on_publish_callback() + + return mqttc, selected_topic diff --git a/src/experiment_1.py b/src/experiment_1.py new file mode 100644 index 0000000..2a5da5c --- /dev/null +++ b/src/experiment_1.py @@ -0,0 +1,44 @@ +import time +import numpy as np # pylint: disable=unused-import + +# Project imports +from data.accel.hbk.accelerometer import Accelerometer # type: ignore +from data.sources.mqtt import setup_mqtt_client, load_config # type: ignore + + +def main(): + config = load_config("config/production.json") + mqtt_config = config["MQTT"] + + topic_index = 0 + mqtt_client, selected_topic = setup_mqtt_client(mqtt_config, topic_index) + mqtt_client.connect(mqtt_config["host"], mqtt_config["port"], 60) + mqtt_client.loop_start() + + # Initialize Accelerometer + accelerometer = Accelerometer( + mqtt_client, + topic=selected_topic, + map_size=192) + + # Clear stored data + with accelerometer.acquire_lock(): + accelerometer.data_map.clear() + + while True: + time.sleep(2.1) + + with accelerometer.acquire_lock(): + # Print the stored dictionary for inspection + for key, fifo in sorted(accelerometer.data_map.items()): + print(f"Key: {key} -> Data: {list(fifo)}\n") + _, data = accelerometer.read(requested_samples=128) + print("Data requested", data) + #break + + #mqtt_client.loop_stop() + #print("Data requested", data) + + +if __name__ == '__main__': + main() diff --git a/src/main.py b/src/main.py deleted file mode 100644 index 63248d4..0000000 --- a/src/main.py +++ /dev/null @@ -1,6 +0,0 @@ - -def main() -> None: - print("Hello World") - -if __name__ == "__main__": - main() diff --git a/tests/data/accel/hbk/constants.py b/tests/data/accel/hbk/constants.py new file mode 100644 index 0000000..03862f6 --- /dev/null +++ b/tests/data/accel/hbk/constants.py @@ -0,0 +1,9 @@ +BATCH_SIZE = 32 # Number of data samples in each message + +DESCRIPTOR_LENGTH = 28 # Fixed length of the descriptor section in bytes + +METADATA_VERSION = 2 # Version number for metadata, always set to 2 + +SECONDS = 1742400339 # Example value for seconds since epoch + +NANOSECONDS = 123456789 # Example nanoseconds value \ No newline at end of file diff --git a/tests/data/accel/hbk/test_accelerometer.py b/tests/data/accel/hbk/test_accelerometer.py new file mode 100644 index 0000000..83745f2 --- /dev/null +++ b/tests/data/accel/hbk/test_accelerometer.py @@ -0,0 +1,238 @@ +import time +import json +import pytest +import struct + +import numpy as np +from data.accel.hbk.accelerometer import Accelerometer # type: ignore +from constants import DESCRIPTOR_LENGTH, METADATA_VERSION, SECONDS, NANOSECONDS, BATCH_SIZE +from data.sources.mqtt import setup_mqtt_client, load_config +import uuid + + +@pytest.fixture(scope="function") +def mqtt_client(): + config = load_config("config/test.json") + mqtt_config = config["MQTT"].copy() + mqtt_config["ClientID"] = f"test_{uuid.uuid4().hex[:6]}" + + topic_index = 0 + client, selected_topic = setup_mqtt_client(mqtt_config, topic_index) + + client.connect(mqtt_config["host"], mqtt_config["port"], 60) + client.loop_start() + time.sleep(0.1) + + yield client, selected_topic + client.loop_stop() + client.disconnect() + + +@pytest.fixture(scope="function") +def client_and_topic(mqtt_client): + client, topic = 
mqtt_client + return client, topic + + +@pytest.fixture(scope="function") +def accelerometer_instance(client_and_topic): + client, topic = client_and_topic + return Accelerometer(client, topic=topic, map_size=192) + + +@pytest.fixture(autouse=True) +def clear_fifo(accelerometer_instance): + with accelerometer_instance._lock: + accelerometer_instance.data_map.clear() + yield + + +def publish_binary_samples(client, topic, start, end): + """Helper function to publish 32 samples per message.""" + for batch_start in range(start, end, BATCH_SIZE): + batch_end = min(batch_start + BATCH_SIZE, end) + data_samples = [struct.pack("<f", float(value)) for value in range(batch_start, batch_end)] + descriptor = struct.pack("<HHQQQ", DESCRIPTOR_LENGTH, METADATA_VERSION, SECONDS, NANOSECONDS, batch_start) + payload = descriptor + b"".join(data_samples) + client.publish(topic, payload, qos=1) + + +def test_accelerometer_read_removes_consumed_samples(client_and_topic, accelerometer_instance): + """ + Test that samples are returned in order and that only the returned samples are removed. + + Steps: + 1. Publish 64 samples (values 0 to 63). + 2. Read the first 30 samples -> Expect [0, 1, ..., 29]. + 3. Read another 30 samples -> Expect [30, 31, ..., 59]. + 4. Verify: + - The retrieved samples match the expected sequences. + - The remaining samples in the buffer match expectations. + """ + client, topic = client_and_topic + + # Step 1: Publish 64 samples (values 0 to 63) + publish_binary_samples(client, topic, 0, 64) + total_samples = 0 + + while total_samples < 64: + with accelerometer_instance._lock: + total_samples = sum(len(deque) for deque in accelerometer_instance.data_map.values()) + + # Read the first 30 samples + status_1, data_1 = accelerometer_instance.read(30) + assert np.allclose(data_1, np.arange(30)), f"Order mismatch: {data_1[:10]}" + + # Read the next 30 samples + status_2, data_2 = accelerometer_instance.read(30) + assert np.allclose(data_2, np.arange(30, 60)), f"Order mismatch: {data_2[:10]}" + + with accelerometer_instance._lock: + remaining_samples = sum(len(deque) for deque in accelerometer_instance.data_map.values()) + + assert remaining_samples == 4, f"Expected 4 samples left, but found {remaining_samples}" + + +def test_accelerometer_read_full_fifo(client_and_topic, accelerometer_instance): + """ + Test that the accelerometer correctly stores and retrieves the full FIFO capacity. + + Steps: + 1. Publish exactly 96 samples. + 2. Read all 96 samples. + 3. Verify: + - The status is 1 (all requested samples retrieved). + - The shape is (96,). + - The data is in sequential order from 0 to 95. + """ + client, topic = client_and_topic + + publish_binary_samples(client, topic, 0, 96) + total_samples = 0 + + while total_samples < 96: + with accelerometer_instance._lock: + total_samples = sum(len(deque) for deque in accelerometer_instance.data_map.values()) + + status, data = accelerometer_instance.read(96) + + assert status == 1, f"Expected status 1, but got {status}" + assert data.shape == (96,), f"Unexpected shape: {data.shape}" + assert np.allclose(data, np.arange(96)), f"Order mismatch: {data[:10]}" + + +def test_accelerometer_read_partial_fifo(client_and_topic, accelerometer_instance): + """ + Test that when reading fewer samples than available, the oldest samples are retrieved. + + Steps: + 1. Publish 64 samples. + 2. Read only 32 samples. + 3. Verify: + - The status is 1 (exact number of requested samples retrieved). + - The shape is (32,). + - The first 32 samples (0–31) are retrieved. 
+ """ + client, topic = client_and_topic + + publish_binary_samples(client, topic, 0, 64) + total_samples = 0 + + while total_samples < 64: + with accelerometer_instance._lock: + total_samples = sum(len(deque) for deque in accelerometer_instance.data_map.values()) + + status, data = accelerometer_instance.read(32) + + assert np.allclose(data, np.arange(32)), f"Order mismatch: {data[:10]}" + assert status == 1, f"Expected status 1, but got {status}" + assert data.shape == (32,), f"Unexpected shape: {data.shape}" + + +def test_accelerometer_read_insufficient_samples(client_and_topic, accelerometer_instance): + """ + Test that the accelerometer correctly handles cases where fewer samples exist than requested. + + Steps: + 1. Publish 64 samples. + 2. Request 96 samples (more than available). + 3. Verify: + - The status is 0 (not enough samples). + - The shape is (64,). + - All 64 samples are returned. + """ + client, topic = client_and_topic + + publish_binary_samples(client, topic, 0, 64) + total_samples = 0 + + while total_samples < 64: + with accelerometer_instance._lock: + total_samples = sum(len(deque) for deque in accelerometer_instance.data_map.values()) + + status, data = accelerometer_instance.read(96) + + assert status == 0, f"Expected status 0, but got {status}" + assert data.shape == (64,), f"Unexpected shape: {data.shape}" + + +def test_accelerometer_appending_more_samples_than_max(client_and_topic, accelerometer_instance): + """ + Test that when publishing more than the max FIFO size, only the most recent data is stored. + + The max FIFO size is set to 192, but we publish 224 samples. + + Steps: + 1. Publish 224 samples. + 2. Request all 224 samples (to check if old ones were removed). + 3. Verify: + - The status is 0 (not all requested samples are available). + - The shape is (192,). + """ + client, topic = client_and_topic + + publish_binary_samples(client, topic, 0, 224) + total_samples = 0 + + while total_samples < 192: + with accelerometer_instance._lock: + total_samples = sum(len(deque) for deque in accelerometer_instance.data_map.values()) + + status, data = accelerometer_instance.read(224) + + assert status == 0, f"Expected status 0, but got {status}" + assert data.shape == (192,), f"Unexpected shape: {data.shape}" + + +def test_accelerometer_reordering_late_sample(client_and_topic, accelerometer_instance): + """ + Simulates delayed delivery of the middle batch (32-63) and checks if the accelerometer correctly + orders samples based on `samples_from_daq_start`. + """ + client, topic = client_and_topic + + # Publish first batch (0–31) + publish_binary_samples(client, topic, 0, 32) + + # Publish last batch (64–95) BEFORE the middle batch + publish_binary_samples(client, topic, 64, 96) + + # Publish middle batch (32–63) AFTER last batch + publish_binary_samples(client, topic, 32, 64) + + # Wait for all samples to arrive + total_samples = 0 + while total_samples < 96: # Max wait time: 5 seconds + with accelerometer_instance._lock: + total_samples = sum(len(deque) for deque in accelerometer_instance.data_map.values()) + status, data = accelerometer_instance.read(96) + + + assert status == 1, f"Expected status 1, but got {status}" + assert data.shape == (96,), f"Unexpected shape: {data.shape}" + expected_data = np.arange(96) # Expected: 0, 1, 2, ..., 95 + assert np.allclose(data, expected_data), f"Data order mismatch! Got: {data[:10]}..." 
diff --git a/tests/cp-sens/data/sources/test_mqtt.py b/tests/data/sources/test_mqtt.py similarity index 80% rename from tests/cp-sens/data/sources/test_mqtt.py rename to tests/data/sources/test_mqtt.py index d3d871d..6779de8 100644 --- a/tests/cp-sens/data/sources/test_mqtt.py +++ b/tests/data/sources/test_mqtt.py @@ -64,25 +64,30 @@ def test_on_publish_callback(capsys): captured = capsys.readouterr().out assert "Message 99 published" in captured + def test_setup_mqtt_client(): dummy_config = { - "MQTT": { - "ClientID": "test_client", - "userId": "test_user", - "password": "test_pass", - "TopicsToSubscribe": ["test/topic1", "test/topic2"], - "QoS": 1, - "host": "localhost", - "port": 1883 - } + "ClientID": "test_client", # Ensure ClientID is at the correct level + "userId": "test_user", + "password": "test_pass", + "TopicsToSubscribe": ["test/topic1", "test/topic2"], + "QoS": 1, + "host": "localhost", + "port": 1883 } - client = setup_mqtt_client(dummy_config) + + # Unpack the returned tuple + client, selected_topic = setup_mqtt_client(dummy_config) + # Check that the client has the correct client_id. client_id = client._client_id.decode() if isinstance(client._client_id, bytes) else client._client_id assert client_id == "test_client" - - # Verify that all callback functions has been assigned. + + # Verify that all callback functions have been assigned. assert client.on_connect is not None assert client.on_subscribe is not None assert client.on_message is not None - assert client.on_publish is not None \ No newline at end of file + assert client.on_publish is not None + + # Optional: Ensure the correct topic is selected + assert selected_topic == "test/topic1" # Since topic_index defaults to 0
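Because `Accelerometer.read()` consumes whatever is buffered and signals a short read with `status = 0`, callers that need a minimum batch (for example `MIN_SAMPLES_NEEDED` from `src/data/accel/constants.py`) typically wait for the FIFO to fill before reading, as the tests do. A minimal sketch of such a wait-then-read loop, assuming an already-connected `Accelerometer` instance; the helper `wait_and_read` is illustrative and not an existing function in the repository.

```python
import time

from data.accel.constants import INTERVAL, MIN_SAMPLES_NEEDED, TIMEOUT


def wait_and_read(accelerometer, requested_samples=MIN_SAMPLES_NEEDED,
                  timeout=TIMEOUT, interval=INTERVAL):
    """Wait until the FIFO holds enough samples, then read them in a single call."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        with accelerometer.acquire_lock():
            buffered = sum(len(fifo) for fifo in accelerometer.data_map.values())
        if buffered >= requested_samples:
            break
        time.sleep(interval)  # give the MQTT callback thread time to fill the FIFO
    # status is 1 for a full batch, 0 if the timeout expired with fewer samples buffered
    status, data = accelerometer.read(requested_samples)
    return status, data
```

Checking the buffered count first, rather than calling `read()` in a retry loop, avoids consuming a partial batch: each call to `read()` removes the samples it returns, so repeated short reads would silently discard data.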