diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 769e3f2f4..04b98a9f4 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,2 +1,27 @@ +## Description + +Summary of the changes introduced in this PR. Try to use bullet points as much as possible. + +## Look & Feel + +This section can contain example pictures for UI, Input/Output for CLI, Request / Response for API endpoint, etc. + +## How to test + +Steps to test it or name of the tests functions. + +The library [flexmeasures-client](https://github.com/FlexMeasures/flexmeasures-client/) can be useful to showcase new features. For example, +it can be used to set some example data to be used in a new UI feature. + +## Further Improvements + +Potential improvements to be done in the same PR or follow up Issues/Discussions/PRs. + +## Related Items + +Mention if this PR closes an Issue or Project. + +--- + - [ ] I agree to contribute to the project under Apache 2 License. 
-- [ ] To the best of my knowledge, the proposed patch is not based on a code under GPL or other license that is incompatible with FlexMeasures \ No newline at end of file +- [ ] To the best of my knowledge, the proposed patch is not based on code under GPL or other license that is incompatible with FlexMeasures diff --git a/.github/workflows/lint-and-test.yml b/.github/workflows/lint-and-test.yml index 163591863..f8d561ee2 100644 --- a/.github/workflows/lint-and-test.yml +++ b/.github/workflows/lint-and-test.yml @@ -1,7 +1,10 @@ name: lint-and-test -on: ["push", "pull_request"] - +on: + push: + pull_request: + types: + - opened jobs: check: runs-on: ubuntu-latest @@ -35,7 +38,8 @@ jobs: run: | git fetch --prune --unshallow git fetch --depth=1 origin +refs/tags/*:refs/tags/* - - uses: actions/cache@v2 + - name: "Caching dependencies (txt)" + uses: actions/cache@v2 id: cache with: path: ${{ env.pythonLocation }} @@ -46,21 +50,16 @@ jobs: - run: | ci/setup-postgres.sh sudo apt-get -y install coinor-cbc - - name: Install FlexMeasures & dependencies for tests - if: steps.cache.outputs.cache-hit != 'true' - run: | - make install-pip-tools - make install-for-test - pip install coveralls - - name: Run all tests except those marked to be skipped by GitHub - run: pytest -m "not skip_github" - if: ${{ matrix.coverage != 'yes' }} + - name: Install FlexMeasures & exact dependencies for tests + run: make install-for-test + if: github.event_name == 'push' && steps.cache.outputs.cache-hit != 'true' + - name: Install FlexMeasures & latest dependencies for tests + run: make install-for-test pinned=no + if: github.event_name == 'pull_request' - name: Run all tests except those marked to be skipped by GitHub AND record coverage - run: pytest -v -m "not skip_github" --cov=flexmeasures --cov-branch - if: ${{ matrix.coverage == 'yes' }} - - run: coveralls --service=github - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: pytest -v -m "not skip_github" --cov=flexmeasures 
--cov-branch --cov-report=lcov + - name: Coveralls + uses: coverallsapp/github-action@v2 if: ${{ matrix.coverage == 'yes' }} env: PGHOST: 127.0.0.1 diff --git a/.gitignore b/.gitignore index 2ce31f0d1..dd173e77f 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,7 @@ notebooks/.ipynb_checkpoints/ flexmeasures/ui/static/documentation documentation/img/screenshot_* +documentation/_autosummary/ generic_asset_fm_user_ownership.sql uml_diagram.png @@ -36,3 +37,5 @@ db_schema.png .coverage htmlcov +test/* +profile_reports/* diff --git a/.vscode/settings.json b/.vscode/settings.json index 618764c3f..9e744aa83 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -13,5 +13,10 @@ "python.linting.pylintEnabled": false, "python.linting.flake8Enabled": true, "workbench.editor.wrapTabs": true, - "python.formatting.provider": "black" + "python.formatting.provider": "black", + "python.testing.pytestArgs": [ + "flexmeasures" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true } diff --git a/.vscode/spellright.dict b/.vscode/spellright.dict index e8d5faa48..0d4c10887 100644 --- a/.vscode/spellright.dict +++ b/.vscode/spellright.dict @@ -251,3 +251,9 @@ Changelog Bugfixes Dockerfile nt +Backoffice +eval +dataframe +dataframes +args +docstrings diff --git a/Makefile b/Makefile index c8f6256cd..9af156159 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ # Note: use tabs # actions which are virtual, i.e. 
not a script -.PHONY: install install-for-dev install-deps install-flexmeasures run-local test freeze-deps upgrade-deps update-docs update-docs-pdf show-file-space show-data-model clean-db +.PHONY: install install-for-dev install-for-test install-deps install-flexmeasures run-local test freeze-deps upgrade-deps update-docs update-docs-pdf show-file-space show-data-model clean-db # ---- Development --- @@ -14,18 +14,20 @@ test: # ---- Documentation --- +gen_code_docs := False # by default code documentation is not generated + update-docs: @echo "Creating docs environment ..." make install-docs-dependencies @echo "Creating documentation ..." - cd documentation; make clean; make html SPHINXOPTS="-W --keep-going -n"; cd .. + export GEN_CODE_DOCS=${gen_code_docs}; cd documentation; make clean; make html SPHINXOPTS="-W --keep-going -n"; cd .. update-docs-pdf: @echo "NOTE: PDF documentation requires packages (on Debian: latexmk texlive-latex-recommended texlive-latex-extra texlive-fonts-recommended)" @echo "NOTE: Currently, the docs require some pictures which are not in the git repo atm. Ask the devs." make install-sphinx-tools - cd documentation; make clean; make latexpdf; make latexpdf; cd .. # make latexpdf can require two passes + export GEN_CODE_DOCS=${gen_code_docs}; cd documentation; make clean; make latexpdf; make latexpdf; cd .. 
# make latexpdf can require two passes # ---- Installation --- @@ -37,13 +39,27 @@ install-for-dev: make install-flexmeasures install-for-test: - pip-sync requirements/app.txt requirements/dev.txt requirements/test.txt + make install-pip-tools +# Pass pinned=no if you want to test against latest stable packages, default is our pinned dependency set +ifneq ($(pinned), no) + pip-sync requirements/app.txt requirements/test.txt +else + # cutting off the -c inter-layer dependency (that's pip-tools specific) + tail -n +3 requirements/test.in >> temp-test.in + pip install --upgrade -r requirements/app.in -r temp-test.in + rm temp-test.in +endif make install-flexmeasures install-deps: make install-pip-tools make freeze-deps +# Pass pinned=no if you want to test against latest stable packages, default is our pinned dependency set +ifneq ($(pinned), no) pip-sync requirements/app.txt +else + pip install --upgrade -r requirements/app.in +endif install-flexmeasures: pip install -e . diff --git a/documentation/_templates/custom-module-template.rst b/documentation/_templates/custom-module-template.rst new file mode 100644 index 000000000..ef91d2608 --- /dev/null +++ b/documentation/_templates/custom-module-template.rst @@ -0,0 +1,67 @@ +.. Adapted from https://stackoverflow.com/a/62613202 +{{ fullname | escape | underline}} + +{% block modules %} +{% if modules %} +.. rubric:: Modules + +.. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: +{% for item in modules %} + {% if "test" not in item %} + {{ item }} + {% endif %} +{%- endfor %} +{% endif %} +{% endblock %} + +.. automodule:: {{ fullname }} + + {% block attributes %} + {% if attributes %} + .. rubric:: Module Attributes + + + {% for item in attributes %} + .. autoattribute:: + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block functions %} + {% if functions %} + .. rubric:: {{ _('Functions') }} + + {% for item in functions %} + .. 
autofunction:: + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block classes %} + {% if classes %} + .. rubric:: {{ _('Classes') }} + + {% for item in classes %} + .. autoclass:: {{ item }} + :members: + :special-members: __init__ + :private-members: + {%- endfor %} + {% endif %} + {% endblock %} + + {% block exceptions %} + {% if exceptions %} + .. rubric:: {{ _('Exceptions') }} + + {% for item in exceptions %} + .. autoexception:: + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} \ No newline at end of file diff --git a/documentation/api/change_log.rst b/documentation/api/change_log.rst index 406d898cc..0941ab77a 100644 --- a/documentation/api/change_log.rst +++ b/documentation/api/change_log.rst @@ -5,6 +5,17 @@ API change log .. note:: The FlexMeasures API follows its own versioning scheme. This is also reflected in the URL, allowing developers to upgrade at their own pace. +v3.0-11 | 2023-07-20 +"""""""""""""""""""" + +- Added REST endpoint for fetching one sensor: `/sensors/` (GET) + +v3.0-10 | 2023-06-12 +"""""""""""""""""""" + +- Introduced the ``storage-efficiency`` field to the ``flex-model``field for `/sensors//schedules/trigger` (POST). +- Introduced the ``database_redis`` optional field to the response of the endpoint `/health/ready` (GET). + v3.0-9 | 2023-04-26 """"""""""""""""""" diff --git a/documentation/api/introduction.rst b/documentation/api/introduction.rst index 13a105ca1..6a44c9646 100644 --- a/documentation/api/introduction.rst +++ b/documentation/api/introduction.rst @@ -45,7 +45,7 @@ Let's see what the ``/api`` endpoint returns: {'flexmeasures_version': '0.9.0', 'message': 'For these API versions endpoints are available. An authentication token can be requested at: /api/requestAuthToken. For a list of services, see https://flexmeasures.readthedocs.io', 'status': 200, - 'versions': ['v1', 'v1_1', 'v1_2', 'v1_3', 'v2_0', 'v3_0'] + 'versions': ['v3_0'] } So this tells us which API versions exist. 
For instance, we know that the latest API version is available at: @@ -57,6 +57,9 @@ So this tells us which API versions exist. For instance, we know that the latest Also, we can see that a list of endpoints is available on https://flexmeasures.readthedocs.io for each of these versions. +.. note:: Sunset API versions are still documented there, simply select an older version. + + .. _api_auth: Authentication @@ -111,6 +114,8 @@ When an API feature becomes obsolete, we deprecate it. Deprecation of major features doesn't happen a lot, but when it does, it happens in multiple stages, during which we support clients and hosts in adapting. For more information on our multi-stage deprecation approach and available options for FlexMeasures hosts, see :ref:`Deprecation and sunset for hosts`. +.. _api_deprecation_clients: + Clients ^^^^^^^ @@ -154,10 +159,9 @@ Hosts FlexMeasures versions go through the following stages for deprecating major features (such as API versions): -- :ref:`api_deprecation_stage_1`: status 200 (OK) with relevant headers, plus a toggle to 410 (Gone) for blackout tests +- :ref:`api_deprecation_stage_1`: status 200 (OK) with :ref:`relevant headers`, plus a toggle to 410 (Gone) for blackout tests - :ref:`api_deprecation_stage_2`: status 410 (Gone), plus a toggle to 200 (OK) for sunset rollbacks -- :ref:`api_deprecation_stage_3`: status 404 (Not Found), plus a toggle to 410 (Gone) for removal rollbacks -- :ref:`api_deprecation_stage_4`: status 404 (Not Found), and removal of relevant endpoints +- :ref:`api_deprecation_stage_3`: status 410 (Gone) Let's go over these stages in more detail. @@ -202,15 +206,4 @@ To enable this, just set the config setting ``FLEXMEASURES_API_SUNSET_ACTIVE = F Stage 3: Definitive sunset """""""""""""""""""""""""" -After upgrading to one of the next FlexMeasures versions (e.g. ``flexmeasures==0.14``), clients that call sunset endpoints will receive ``HTTP status 404 (Not Found)`` responses. 
-In case you need clients to receive the slightly more informative ``HTTP status 410 (Gone)`` for a little while longer, we will continue to support a "removal rollback". -To enable this, just set the config setting ``FLEXMEASURES_API_SUNSET_ACTIVE = True``. -This, just like in deprecation stages 1 and 2, leads to status 410 (Gone) responses. -Note that ``FLEXMEASURES_API_SUNSET_ACTIVE = False`` now leads to status 404 (Not Found) responses, unlike in deprecation stages 1 and 2, where this would have lead to status 200 (OK) responses. - -.. _api_deprecation_stage_4: - -Stage 4: Removal -"""""""""""""""" - -After upgrading to one of the next FlexMeasures versions (e.g. ``flexmeasures==0.15``), clients that call sunset endpoints will receive ``HTTP status 404 (Not Found)`` responses. +After upgrading to one of the next FlexMeasures versions (e.g. ``flexmeasures==0.14``), clients that call sunset endpoints will receive ``HTTP status 410 (Gone)`` responses. diff --git a/documentation/api/notation.rst b/documentation/api/notation.rst index 9b3de79d8..ff1f44b22 100644 --- a/documentation/api/notation.rst +++ b/documentation/api/notation.rst @@ -145,10 +145,8 @@ Technically, this is equal to: This intuitive convention allows us to reduce communication by sending univariate timeseries as arrays. -Notation for v1, v2 and v3 -"""""""""""""""""""""""""" -For version 1, 2 and 3 of the API, only equidistant timeseries data is expected to be communicated. Therefore: +In all current versions of the FlexMeasures API, only equidistant timeseries data is expected to be communicated. Therefore: - only the array notation should be used (first notation from above), - "start" should be a timestamp on the hour or a multiple of the sensor resolution thereafter (e.g. 
"16:10" works if the resolution is 5 minutes), and @@ -185,27 +183,37 @@ This means that API and CLI users don't have to send the whole flex model every Here are the three types of flexibility models you can expect to be built-in: -1) For storage devices (e.g. batteries, charge points, electric vehicle batteries connected to charge points), the schedule deals with the state of charge (SOC). +1) For **storage devices** (e.g. batteries, and :abbr:`EV (electric vehicle)` batteries connected to charge points), the schedule deals with the state of charge (SOC). - The possible flexibility parameters are: + The possible flexibility parameters are: - - ``soc-at-start`` (defaults to 0) - - ``soc-unit`` (kWh or MWh) - - ``soc-min`` (defaults to 0) - - ``soc-max`` (defaults to max soc target) - - ``soc-targets`` (defaults to NaN values) - - ``roundtrip-efficiency`` (defaults to 100%) - - ``prefer-charging-sooner`` (defaults to True, also signals a preference to discharge later) + - ``soc-at-start`` (defaults to 0) + - ``soc-unit`` (kWh or MWh) + - ``soc-min`` (defaults to 0) + - ``soc-max`` (defaults to max soc target) + - ``soc-minima`` (defaults to NaN values) + - ``soc-maxima`` (defaults to NaN values) + - ``soc-targets`` (defaults to NaN values) + - ``roundtrip-efficiency`` (defaults to 100%) + - ``storage-efficiency`` (defaults to 100%) [#]_ + - ``prefer-charging-sooner`` (defaults to True, also signals a preference to discharge later) - For some examples, see the `[POST] /sensors/(id)/schedules/trigger <../api/v3_0.html#post--api-v3_0-sensors-(id)-schedules-trigger>`_ endpoint docs. + .. [#] The storage efficiency (e.g. 95% or 0.95) to use for the schedule is applied over each time step equal to the sensor resolution. For example, a storage efficiency of 95 percent per (absolute) day, for scheduling a 1-hour resolution sensor, should be passed as a storage efficiency of :math:`0.95^{1/24} = 0.997865`. -2) Shiftable process - - .. 
todo:: A simple algorithm exists, needs integration into FlexMeasures and asset type clarified. + For some examples, see the `[POST] /sensors/(id)/schedules/trigger <../api/v3_0.html#post--api-v3_0-sensors-(id)-schedules-trigger>`_ endpoint docs. -3) Heat pumps +2) For **shiftable processes** - .. todo:: Also work in progress, needs model for heat loss compensation. + .. todo:: A simple and proven algorithm exists, but is awaiting proper integration into FlexMeasures, see `PR 729 `_. + +3) For **buffer devices** (e.g. thermal energy storage systems connected to heat pumps), use the same flexibility parameters described above for storage devices. Here are some tips to model a buffer with these parameters: + + - Describe the thermal energy content in kWh or MWh. + - Set ``soc-minima`` to the accumulative usage forecast. + - Set ``roundtrip-efficiency`` to the square of the conversion efficiency. [#]_ + - Set ``storage-efficiency`` to a value below 100% to model (heat) loss. + + .. [#] Setting a roundtrip efficiency of higher than 1 is not supported. We plan to implement a separate field for :abbr:`COP (coefficient of performance)` values. In addition, folks who write their own custom scheduler (see :ref:`plugin_customization`) might also require their custom flexibility model. That's no problem, FlexMeasures will let the scheduler decide which flexibility model is relevant and how it should be validated. diff --git a/documentation/changelog.rst b/documentation/changelog.rst index d9ec81230..31e7af3b2 100644 --- a/documentation/changelog.rst +++ b/documentation/changelog.rst @@ -3,16 +3,20 @@ FlexMeasures Changelog ********************** -v0.14.0 | June XX, 2023 +v0.15.0 | July XX, 2023 ============================ +.. warning:: Upgrading to this version requires running ``flexmeasures db upgrade`` (you can create a backup first with ``flexmeasures db-ops dump``). 
+ New features ------------- -* Add multiple maxima and minima constraints into `StorageScheduler` [see `PR #680 `_] -* Introduction of the classes `Reporter` and `PandasReporter` [see `PR #641 `_] -* Add CLI command ``flexmeasures add report`` [see `PR #659 `_] -* Add CLI command ``flexmeasures show reporters`` [see `PR #686 `_] +* Allow deleting multiple sensors with a single call to ``flexmeasures delete sensor`` by passing the ``--id`` option multiple times [see `PR #734 `_] +* Make it a lot easier to read off the color legend on the asset page, especially when showing many sensors, as they will now be ordered from top to bottom in the same order as they appear in the chart (as defined in the ``sensors_to_show`` attribute), rather than alphabetically [see `PR #742 `_] +* Having percentages within the [0, 100] domain is such a common use case that we now always include it in sensor charts with % units, making it easier to read off individual charts and also to compare across charts [see `PR #739 `_] +* DataSource table now allows storing arbitrary attributes as a JSON (without content validation), similar to the Sensor and GenericAsset tables [see `PR #750 `_] +* Added API endpoint `/sensors/` for fetching a single sensor. [see `PR #759 `_] +* The CLI now allows to set lists and dicts as asset & sensor attributes (formerly only single values) [see `PR #762 `_] Bugfixes ----------- @@ -20,15 +24,86 @@ Infrastructure / Support ---------------------- +* Add support for profiling Flask API calls using ``pyinstrument`` (if installed). Can be enabled by setting the environment variable ``FLEXMEASURES_PROFILE_REQUESTS`` to ``True`` [see `PR #722 `_] +* The endpoint `[POST] /health/ready `_ returns the status of the Redis connection, if configured [see `PR #699 `_] +* Document the `device_scheduler` linear program [see `PR #764 `_].
+ +/api/v3_0/health/ready + +v0.14.2 | July 25, 2023 +============================ + +Bugfixes +----------- + +* The error handling for infeasible constraints in storage.py was given too many arguments. This caused the response from the API to be unhelpful when a schedule was requested with infeasible constraints. [see `PR #758 `_] + + +v0.14.1 | June 26, 2023 +============================ + +Bugfixes +----------- + +* Relax constraint validation of `StorageScheduler` to accommodate violations caused by floating point precision [see `PR #731 `_] +* Avoid saving any :abbr:`NaN (not a number)` values to the database, when calling ``flexmeasures add report`` [see `PR #735 `_] +* Fix browser console error when loading asset or sensor page with only a single data point [see `PR #732 `_] +* Fix showing multiple sensors with bare 3-letter currency code as their units (e.g. EUR) in one chart [see `PR #738 `_] +* Fix defaults for the ``--start-offset`` and ``--end-offset`` options to ``flexmeasures add report``, which weren't being interpreted in the local timezone of the reporting sensor [see `PR #744 `_] +* Relax constraint for overlaying plot traces for sensors with various resolutions, making it possible to show e.g. two price sensors in one chart, where one of them records hourly prices and the other records quarter-hourly prices [see `PR #743 `_] +* Resolve bug where different page loads would potentially influence the time axis of each other's charts, by avoiding mutation of shared field definitions [see `PR #746 `_] + + +v0.14.0 | June 15, 2023 +============================ + +.. note:: Read more on these features on `the FlexMeasures blog `__. 
+ +New features +------------- + +* Allow setting a storage efficiency using the new ``storage-efficiency`` field when calling `/sensors//schedules/trigger` (POST) through the API (within the ``flex-model`` field), or when calling ``flexmeasures add schedule for-storage`` through the CLI [see `PR #679 `_] +* Allow setting multiple :abbr:`SoC (state of charge)` maxima and minima constraints for the `StorageScheduler`, using the new ``soc-minima`` and ``soc-maxima`` fields when calling `/sensors//schedules/trigger` (POST) through the API (within the ``flex-model`` field) [see `PR #680 `_] +* New CLI command ``flexmeasures add report`` to calculate a custom report from sensor data and save the results to the database, with the option to export them to a CSV or Excel file [see `PR #659 `_] +* New CLI commands ``flexmeasures show reporters`` and ``flexmeasures show schedulers`` to list available reporters and schedulers, respectively, including any defined in registered plugins [see `PR #686 `_ and `PR #708 `_] +* Allow creating public assets through the CLI, which are available to all users [see `PR #727 `_] + +Bugfixes +----------- +* Fix charts not always loading over https in secured scenarios [see `PR #716 `_] + +Infrastructure / Support +---------------------- + +* Introduction of the classes `Reporter`, `PandasReporter` and `AggregatorReporter` to help customize your own reporter functions (experimental) [see `PR #641 `_ and `PR #712 `_] * The setting FLEXMEASURES_PLUGINS can be set as environment variable now (as a comma-separated list) [see `PR #660 `_] * Packaging was modernized to stop calling setup.py directly [see `PR #671 `_] -* Remove API versions 1.0, 1.1, 1.2, 1.3 and 2.0, while allowing hosts to switch between ``HTTP status 410 (Gone)`` and ``HTTP status 404 (Not Found)`` responses [see `PR #667 `_] +* Remove API versions 1.0, 1.1, 1.2, 1.3 and 2.0, while making sure that sunset endpoints keep returning ``HTTP status 410 (Gone)`` responses [see `PR 
#667 `_ and `PR #717 `_] +* Support Pandas 2 [see `PR #673 `_] +* Add code documentation from package structure and docstrings to official docs [see `PR #698 `_] .. warning:: The setting `FLEXMEASURES_PLUGIN_PATHS` has been deprecated since v0.7. It has now been sunset. Please replace it with :ref:`plugin-config`. +v0.13.3 | June 10, 2023 +======================= + +Bugfixes +--------- +* Fix forwarding arguments in deprecated util function [see `PR #719 `_] + + +v0.13.2 | June 9, 2023 +======================= + +Bugfixes +--------- +* Fix failing to save results of scheduling and reporting on subsequent calls for the same time period [see `PR #709 `_] + + v0.13.1 | May 12, 2023 ======================= + Bugfixes --------- * `@deprecated` not returning the output of the decorated function [see `PR #678 `_] @@ -70,6 +145,7 @@ Infrastructure / Support * Fix broken `make show-data-model` command [see `PR #638 `_] * Bash script for a clean database to run toy-tutorial by using `make clean-db db_name=database_name` command [see `PR #640 `_] + v0.12.3 | February 28, 2023 ============================ @@ -125,7 +201,7 @@ Bugfixes * The CLI command ``flexmeasures show beliefs`` now supports plotting time series data that includes NaN values, and provides better support for plotting multiple sensors that do not share the same unit [see `PR #516 `_ and `PR #539 `_] * Fixed JSON wrapping of return message for `/sensors/data` (GET) [see `PR #543 `_] * Consistent CLI/UI support for asset lat/lng positions up to 7 decimal places (previously the UI rounded to 4 decimal places, whereas the CLI allowed more than 4) [see `PR #522 `_] -* Stop trimming the planning window in response to price availability, which is a problem when SoC targets occur outside of the available price window, by making a simplistic assumption about future prices [see `PR #538 `_] +* Stop trimming the planning window in response to price availability, which is a problem when :abbr:`SoC (state of charge)` 
targets occur outside of the available price window, by making a simplistic assumption about future prices [see `PR #538 `_] * Faster loading of initial charts and calendar date selection [see `PR #533 `_] Infrastructure / Support @@ -151,13 +227,12 @@ Infrastructure / Support .. warning:: The CLI command ``flexmeasures add schedule`` has been renamed to ``flexmeasures add schedule for-storage``. The old name will be sunset in version 0.13. - v0.11.3 | November 2, 2022 ============================ Bugfixes ----------- -* Fix scheduling with imperfect efficiencies, which resulted in exceeding the device's lower SoC limit. [see `PR #520 `_] +* Fix scheduling with imperfect efficiencies, which resulted in exceeding the device's lower :abbr:`SoC (state of charge)` limit. [see `PR #520 `_] * Fix scheduler for Charge Points when taking into account inflexible devices [see `PR #517 `_] * Prevent rounding asset lat/long positions to 4 decimal places when editing an asset in the UI [see `PR #522 `_] @@ -223,6 +298,7 @@ Bugfixes ----------- * Fix some UI styling regressions in e.g. 
color contrast and hover effects [see `PR #441 `_] + v0.10.0 | May 8, 2022 =========================== @@ -477,7 +553,6 @@ Infrastructure / Support * add time_utils.get_recent_clock_time_window() function [see `PR #135 `_] - v0.4.1 | May 7, 2021 =========================== @@ -488,6 +563,7 @@ Bugfixes * Prevent logging out user when clearing the session [see `PR #112 `_] * Prevent user type data source to be created without setting a user [see `PR #111 `_] + v0.4.0 | April 29, 2021 =========================== diff --git a/documentation/cli/change_log.rst b/documentation/cli/change_log.rst index 5809aae72..2787c7e51 100644 --- a/documentation/cli/change_log.rst +++ b/documentation/cli/change_log.rst @@ -4,6 +4,26 @@ FlexMeasures CLI Changelog ********************** +since v0.15.0 | July XX, 2023 +================================= + +* Allow deleting multiple sensors with a single call to ``flexmeasures delete sensor`` by passing the ``--id`` option multiple times. + +since v0.14.1 | June XX, 2023 +================================= + +* Avoid saving any :abbr:`NaN (not a number)` values to the database, when calling ``flexmeasures add report``. +* Fix defaults for the ``--start-offset`` and ``--end-offset`` options to ``flexmeasures add report``, which weren't being interpreted in the local timezone of the reporting sensor. + +since v0.14.0 | June 15, 2023 +================================= + +* Allow setting a storage efficiency using the new ``--storage-efficiency`` option to the ``flexmeasures add schedule for-storage`` CLI command. +* Add CLI command ``flexmeasures add report`` to calculate a custom report from sensor data and save the results to the database, with the option to export them to a CSV or Excel file. +* Add CLI command ``flexmeasures show reporters`` to list available reporters, including any defined in registered plugins.
+* Make ``--account-id`` optional in ``flexmeasures add asset`` to support creating public assets, which are available to all users. + since v0.13.0 | May 1, 2023 ================================= diff --git a/documentation/cli/commands.rst b/documentation/cli/commands.rst index b06894fa3..ee31d2d54 100644 --- a/documentation/cli/commands.rst +++ b/documentation/cli/commands.rst @@ -55,6 +55,7 @@ of which some are referred to in this documentation. ``flexmeasures show data-sources`` List available data sources. ``flexmeasures show beliefs`` Plot time series data. ``flexmeasures show reporters`` List available reporters. +``flexmeasures show schedulers`` List available schedulers. ================================================= ======================================= diff --git a/documentation/concepts/device_scheduler.rst b/documentation/concepts/device_scheduler.rst new file mode 100644 index 000000000..60b9b90e6 --- /dev/null +++ b/documentation/concepts/device_scheduler.rst @@ -0,0 +1,177 @@ +.. _device_scheduler: + +Device scheduler +=========== + +Introduction +-------------- +This generic device scheduler is able to handle an EMS with multiple devices, with various types of constraints on the EMS level and on the device level, +and with multiple market commitments on the EMS level. + +A typical example is a house with many devices. The commitments are assumed to be with regard to the flow of energy to the device (positive for consumption, negative for production). In practice, this generic scheduler is used in the **StorageScheduler** to schedule a storage device. + +The solver minimises the costs of deviating from the commitments. 
+ + + +Notation +--------- + +Indexes +^^^^^^^^ +================================ ================================================ ============================================================================================================== +Symbol Variable in the Code Description +================================ ================================================ ============================================================================================================== +:math:`c` c Commitments, for example, day-ahead or intra-day market commitments. +:math:`d` d Devices, for example, a battery or a load. +:math:`j` j 0-indexed time dimension. +================================ ================================================ ============================================================================================================== + +.. note:: + The time index :math:`j` has two interpretations: a time period or an instantaneous moment at the end of time period :math:`j`. + For example, :math:`j` in flow constraints correspond to time periods, whereas :math:`j` used in a stock constraint refers to the end of time period :math:`j`. + +Parameters +^^^^^^^^^^ +================================ ================================================ ============================================================================================================== +Symbol Variable in the Code Description +================================ ================================================ ============================================================================================================== +:math:`Price_{up}(c,j)` up_price Price of incurring an upwards deviations in commitment :math:`c` during time period :math:`j`. +:math:`Price_{down}(c,j)` down_price Price of incurring a downwards deviations in commitment :math:`c` during time period :math:`j`. +:math:`\eta_{up}(d,j)` device_derivative_up_efficiency Upwards conversion efficiency. 
+:math:`\eta_{down}(d,j)` device_derivative_down_efficiency Downwards conversion efficiency. +:math:`Stock_{min}(d,j)` device_min Minimum quantity for the stock of device :math:`d` at the end of time period :math:`j`. +:math:`Stock_{max}(d,j)` device_max Maximum quantity for the stock of device :math:`d` at the end of time period :math:`j`. +:math:`\epsilon(d,j)` efficiencies Stock energy losses. +:math:`P_{max}(d,j)` device_derivative_max Maximum flow of device :math:`d` during time period :math:`j`. +:math:`P_{min}(d,j)` device_derivative_min Minimum flow of device :math:`d` during time period :math:`j`. +:math:`P^{ems}_{min}(j)` ems_derivative_min Minimum flow of the EMS during time period :math:`j`. +:math:`P^{ems}_{max}(j)` ems_derivative_max Maximum flow of the EMS during time period :math:`j`. +:math:`Commitment(c,j)` commitment_quantity Commitment c (at EMS level) over time step :math:`j`. +================================ ================================================ ============================================================================================================== + + +Variables +^^^^^^^^^ +================================ ================================================ ============================================================================================================== +Symbol Variable in the Code Description +================================ ================================================ ============================================================================================================== +:math:`\Delta_{up}(c,j)` commitment_upwards_deviation Upwards deviation from the power commitment :math:`c` of the EMS during time period :math:`j`. +:math:`\Delta_{down}(c,j)` commitment_downwards_deviation Downwards deviation from the power commitment :math:`c` of the EMS during time period :math:`j`. +:math:`\Delta Stock(d,j)` n/a Change of stock of device :math:`d` at the end of time period :math:`j`. 
+:math:`P_{up}(d,j)` device_power_up Upwards power of device :math:`d` during time period :math:`j`. +:math:`P_{down}(d,j)` device_power_down Downwards power of device :math:`d` during time period :math:`j`. +:math:`P^{ems}(j)` ems_power Aggregated power of all the devices during time period :math:`j`. +================================ ================================================ ============================================================================================================== + +Cost function +-------------- + +The cost function quantifies the total cost of upwards and downwards deviations from the different commitments. + +.. math:: + :name: cost_function + + \min [\sum_{c,j} \Delta _{up}(c,j) \cdot Price_{up}(c,j) + \Delta_{down}(c,j) \cdot Price_{down}(c,j)] + + +State dynamics +--------------- + +To simplify the description of the model, the auxiliary variable :math:`\Delta Stock(d,j)` is introduced in the documentation. It represents the +change of :math:`Stock(d,j)`, taking into account conversion efficiencies but not considering the storage losses. + +.. math:: + :name: stock + + \Delta Stock(d,j) = \frac{P_{down}(d,j)}{\eta_{down}(d,j) } + P_{up}(d,j) \cdot \eta_{up}(d,j) + + + +.. math:: + :name: device_bounds + + Stock_{min}(d,j) \leq Stock(d,j) - Stock(d,-1)\leq Stock_{max}(d,j) + + +Perfect efficiency +^^^^^^^^^^^^^^^^^^^ + +.. math:: + :name: efficiency_e1 + + Stock(d, j) = Stock(d, j-1) + \Delta Stock(d,j) + +Left efficiency +^^^^^^^^^^^^^^^^^ +First apply the stock change, then apply the losses (i.e. the stock changes on the left side of the time interval in which the losses apply) + + +.. math:: + :name: efficiency_left + + Stock(d, j) = (Stock(d, j-1) + \Delta Stock(d,j)) \cdot \epsilon(d,j) + + +Right efficiency +^^^^^^^^^^^^^^^^^ +First apply the losses, then apply the stock change (i.e. the stock changes on the right side of the time interval in which the losses apply) + +.. 
math:: + :name: efficiency_right + + Stock(d, j) = Stock(d, j-1) \cdot \epsilon(d,j) + \Delta Stock(d,j) + +Linear efficiency +^^^^^^^^^^^^^^^^^ +Assume the change happens at a constant rate, leading to a linear stock change, and exponential decay, within the current interval + +.. math:: + :name: efficiency_linear + + Stock(d, j) = Stock(d, j-1) \cdot \epsilon(d,j) + \Delta Stock(d,j) \cdot \frac{\epsilon(d,j) - 1}{log(\epsilon(d,j))} + +Constraints +-------------- + +Device bounds +^^^^^^^^^^^^^ + +.. math:: + :name: device_derivative_bounds + + P_{min}(d,j) \leq P_{up}(d,j) + P_{down}(d,j)\leq P_{max}(d,j) + +.. math:: + :name: device_down_derivative_bounds + + min(P_{min}(d,j),0) \leq P_{down}(d,j)\leq 0 + + +.. math:: + :name: device_up_derivative_bounds + + 0 \leq P_{up}(d,j)\leq max(P_{max}(d,j),0) + + +Grid constraints +^^^^^^^^^^^^^^^^^ + +.. math:: + :name: device_derivative_equalities + + P^{ems}(d,j) = P_{up}(d,j) + P_{down}(d,j) + +.. math:: + :name: ems_derivative_bounds + + P^{ems}_{min}(j) \leq \sum_d P^{ems}(d,j) \leq P^{ems}_{max}(j) + +Power coupling constraints +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. math:: + :name: ems_flow_commitment_equalities + + \sum_d P^{ems}(d,j) = \sum_c Commitment(c,j) + \Delta_{up}(c,j) + \Delta_{down}(c,j) + diff --git a/documentation/conf.py b/documentation/conf.py index e6d727873..d8cd08d20 100644 --- a/documentation/conf.py +++ b/documentation/conf.py @@ -6,6 +6,9 @@ # full list see the documentation: # http://www.sphinx-doc.org/en/stable/config +import os +import shutil + from datetime import datetime from pkg_resources import get_distribution import sphinx_fontawesome @@ -41,10 +44,9 @@ # ones.
extensions = [ "sphinx_rtd_theme", - "sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.coverage", - "sphinx.ext.imgmath", + "sphinx.ext.mathjax", "sphinx.ext.ifconfig", "sphinx.ext.todo", "sphinx_copybutton", @@ -54,9 +56,32 @@ "sphinxcontrib.autohttp.flaskqref", ] +autodoc_default_options = {} + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] +# if GEN_CODE_DOCS is not found, the default is gen_code_docs=True +gen_code_docs = not bool( + os.environ.get("GEN_CODE_DOCS", "True").lower() in ("f", "false", "0") +) + + +# Generate code docs +if gen_code_docs: + + # Add dependencies + extensions.extend( + [ + "sphinx.ext.autosummary", + "sphinx.ext.autodoc.typehints", + "sphinx.ext.autodoc", + ] + ) +else: + if os.path.exists("_autosummary"): + shutil.rmtree("_autosummary") + # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # @@ -76,7 +101,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "_templates"] # Todo: these are not mature enough yet for release, or should be removed exclude_patterns.append("int/*.rst") @@ -196,7 +221,7 @@ # -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = {"https://docs.python.org/3/": None} +intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} # -- Options for copybytton extension --------------------------------------- copybutton_prompt_is_regexp = True @@ -224,3 +249,10 @@ def setup(sphinx_app): "live", "env", # hard-coded, documentation is not server-specific for the time being ) + + if gen_code_docs: + from flexmeasures.app import create + + create( + env="documentation" + ) # we need to create the app for when sphinx imports modules that use current_app diff --git a/documentation/configuration.rst b/documentation/configuration.rst index 825e3f738..899a06d98 100644 --- a/documentation/configuration.rst +++ b/documentation/configuration.rst @@ -98,7 +98,15 @@ Default: ``"migrations/dumps"`` FLEXMEASURES_PROFILE_REQUESTS ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Whether to turn on a feature which times requests made through FlexMeasures. Interesting for developers. +If True, the processing time of requests is profiled. + +The overall time used by requests is logged to the console. In addition, if `pyinstrument` is installed, then a profiling report is made (of time being spent in different function calls) for all Flask API endpoints. + +The profiling results are stored in the ``profile_reports`` folder in the instance directory. + +Note: Profile reports for API endpoints are overwritten on repetition of the same request. + +Interesting for developers. Default: ``False`` @@ -628,7 +636,7 @@ FLEXMEASURES_API_SUNSET_ACTIVE Allow control over the effect of sunsetting API versions. Specifically, if True, the endpoints of sunset API versions will return ``HTTP status 410 (Gone)`` status codes. -If False, these endpoints will either return ``HTTP status 404 (Not Found) status codes``, or work like before (including Deprecation and Sunset headers in their response), depending on whether the installed FlexMeasures version still contains the endpoint implementations.
+If False, these endpoints will either return ``HTTP status 410 (Gone) status codes``, or work like before (including Deprecation and Sunset headers in their response), depending on whether the installed FlexMeasures version still contains the endpoint implementations. Default: ``False`` diff --git a/documentation/host/data.rst b/documentation/host/data.rst index 70c1f989f..562d41c81 100644 --- a/documentation/host/data.rst +++ b/documentation/host/data.rst @@ -28,13 +28,18 @@ Install We believe FlexMeasures works with Postgres above version 9 and we ourselves have run it with versions up to 14. -On Unix: +On Linux: .. code-block:: bash + $ # On Ubuntu and Debian, you can install postgres like this: $ sudo apt-get install postgresql-12 # replace 12 with the version available in your packages $ pip install psycopg2-binary + $ # On Fedora, you can install postgres like this: + $ sudo dnf install postgresql postgresql-server + $ sudo postgresql-setup --initdb --unit postgresql + On Windows: @@ -45,6 +50,35 @@ On Windows: * ``conda install psycopg2`` +Using Docker Compose: + + +Alternatively, you can use Docker Compose to run a postgres database. You can use the following ``docker-compose.yml`` as a starting point: + + +.. code-block:: yaml + + version: '3.7' + + services: + postgres: + image: postgres:latest + restart: always + environment: + POSTGRES_USER: flexmeasures + POSTGRES_PASSWORD: this-is-your-secret-choice + POSTGRES_DB: flexmeasures + ports: + - 5432:5432 + volumes: + - ./postgres-data:/var/lib/postgresql/data + network_mode: host + +To run this, simply type ``docker-compose up`` in the directory where you saved the ``docker-compose.yml`` file. Pass the ``-d`` flag to run it in the background. + +This will create a postgres database in a directory ``postgres-data`` in your current working directory. You can change the password and database name to your liking. You can also change the port mapping to e.g.
``5433:5432`` if you already have a postgres database running on your host machine. + + Make sure postgres represents datetimes in UTC timezone ^^^^^^^^^^^^^ @@ -66,6 +100,8 @@ Then restart the postgres server. $ sudo service postgresql restart +.. note:: If you are using Docker to run postgres, the ``timezone`` setting is already set to ``UTC`` by default. + Create "flexmeasures" and "flexmeasures_test" databases and users ^^^^^^^^^^^^^ diff --git a/documentation/index.rst b/documentation/index.rst index 079454a77..597f9ad5e 100644 --- a/documentation/index.rst +++ b/documentation/index.rst @@ -173,14 +173,16 @@ The platform operator of FlexMeasures can be an Aggregator. concepts/inbuilt-smart-functionality concepts/algorithms concepts/security_auth + concepts/device_scheduler .. toctree:: :caption: Tutorials :maxdepth: 1 - tut/toy-example-from-scratch tut/installation + tut/toy-example-from-scratch + tut/toy-example-expanded tut/posting_data tut/forecasting_scheduling tut/building_uis @@ -200,11 +202,6 @@ The platform operator of FlexMeasures can be an Aggregator. api/introduction api/notation api/v3_0 - api/v2_0 - api/v1_3 - api/v1_2 - api/v1_1 - api/v1 api/dev api/change_log @@ -249,11 +246,25 @@ The platform operator of FlexMeasures can be an Aggregator. dev/docker-compose +.. autosummary:: + :caption: Code Documentation + :toctree: _autosummary/ + :template: custom-module-template.rst + :recursive: + + flexmeasures.api + flexmeasures.app + flexmeasures.auth + flexmeasures.cli + flexmeasures.data + flexmeasures.ui + flexmeasures.utils + -Code documentation ------------------- +.. Code documentation +.. ------------------ -Go To :ref:`source`. +.. Go To :ref:`source`. 
diff --git a/documentation/plugin/customisation.rst b/documentation/plugin/customisation.rst index 835122309..8230e47ee 100644 --- a/documentation/plugin/customisation.rst +++ b/documentation/plugin/customisation.rst @@ -42,7 +42,7 @@ The following minimal example gives you an idea of some meta information you can """ return pd.Series( self.sensor.get_attribute("capacity_in_mw"), - index=pd.date_range(self.start, self.end, freq=self.resolution, closed="left"), + index=pd.date_range(self.start, self.end, freq=self.resolution, inclusive="left"), ) def deserialize_config(self): diff --git a/documentation/source.rst b/documentation/source.rst deleted file mode 100644 index b5272dbb4..000000000 --- a/documentation/source.rst +++ /dev/null @@ -1,78 +0,0 @@ -:orphan: - -.. _source: - - -Detailed documentation of all internal modules -=============================================== - -.. todo:: This is not yet complete, see `issue #52 `_ - - -app ---- - -.. automodule:: flexmeasures.app - :members: - - -data.models ------------ - -.. automodule:: flexmeasures.data.models - :members: - - -utils -------- - -.. automodule:: flexmeasures.utils - :members: - -utils.calculations -~~~~~~~~~~~~~~~~~~~~ - -.. automodule:: flexmeasures.utils.calculations - :members: - -utils.time_utils -~~~~~~~~~~~~~~~~~ - -.. automodule:: flexmeasures.utils.time_utils - :members: - -ui.utils.view_utils -~~~~~~~~~~~~~~~~~~~ - -.. automodule:: flexmeasures.ui.utils.view_utils - :members: - - utils.forecasting_utils -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. automodule:: flexmeasures.utils.forecasting_utils - :members: - -ui.utils.plotting_utils -~~~~~~~~~~~~~~~~~~~~~~~ - -.. automodule:: flexmeasures.ui.utils.plotting_utils - :members: - - - -ui.views --------- - -.. automodule:: flexmeasures.ui.views.dashboard - :members: - -.. automodule:: flexmeasures.ui.views.portfolio - :members: - -.. automodule:: flexmeasures.ui.views.control - :members: - -.. 
automodule:: flexmeasures.ui.views.analytics - :members: - diff --git a/documentation/tut/installation.rst b/documentation/tut/installation.rst index 1a09a5a41..ab3ef1c35 100644 --- a/documentation/tut/installation.rst +++ b/documentation/tut/installation.rst @@ -3,8 +3,8 @@ Installation & First steps ================================= -Getting FlexMeasures to run ------------------------------ +Preparing FlexMeasures for running +------------------------------------ This section walks you through installing FlexMeasures on your own PC and running it continuously. We'll cover getting started by making a secret key, connecting a database and creating one user & one asset. @@ -194,12 +194,8 @@ Finally, you can tell FlexMeasures to create forecasts for your meter data with .. note:: You can also use the API to send forecast data. -Run FlexMeasures ------------------- - - -Running the web service -^^^^^^^^^^^^^^^^^^^^^^^^^^ +Running FlexMeasures as a web service +-------------------------------------- It's finally time to start running FlexMeasures: diff --git a/documentation/tut/posting_data.rst b/documentation/tut/posting_data.rst index 51d623dd5..ac60bafea 100644 --- a/documentation/tut/posting_data.rst +++ b/documentation/tut/posting_data.rst @@ -94,11 +94,11 @@ If this resolution does not match the sensor's resolution, FlexMeasures will try Likewise, if the data unit does not match the sensor’s unit, FlexMeasures will attempt to convert the data or, if that is not possible, complain. -Posting power data ------------------- +Being explicit when posting power data +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ For power data, USEF specifies separate message types for observations and forecasts. 
-Correspondingly, we allow the following message types to be used with the [POST] /sensors/data endpoint (see :ref:`posting_sensor_data`): +Correspondingly, we allow the following message types to be used with the `POST /sensors/data <../api/v3_0.html#post--api-v3_0-sensors-data>`_ endpoint: .. code-block:: json @@ -239,8 +239,8 @@ Multiple values (indicating a univariate timeseries) for 15-minute time interval .. _observations_vs_forecasts -Observations vs forecasts --------------------------- +Observations vs forecasts: The time of knowledge +------------------------------------------------- To correctly tell FlexMeasures when a meter reading or forecast was known is crucial, as it determines which data is being used to compute schedules or to make other forecasts. diff --git a/documentation/tut/toy-example-expanded.rst b/documentation/tut/toy-example-expanded.rst new file mode 100644 index 000000000..38457b51e --- /dev/null +++ b/documentation/tut/toy-example-expanded.rst @@ -0,0 +1,106 @@ +.. _tut_toy_schedule_expanded: + + + +Toy example II: Adding solar production and limited grid connection +==================================================================== + + +So far we haven't taken into account any other devices that consume or produce electricity. The battery was free to use all available capacity towards the grid. + +What if other devices will be using some of that capacity? Our schedules need to reflect that, so we stay within given limits. + +.. note:: The capacity is given by ``capacity_in_mw``, an attribute we placed on the battery asset earlier (see :ref:`tut_toy_schedule`). We will tell FlexMeasures to take the solar production into account (using ``--inflexible-device-sensor``) for this capacity limit. + +We'll now add solar production forecast data and then ask for a new schedule, to see the effect of solar on the available headroom for the battery. 
+ + +Adding PV production forecasts +------------------------------ + +First, we'll create a new CSV file with solar forecasts (MW, see the setup for sensor 3 in part I of this tutorial) for tomorrow. + +.. code-block:: bash + + $ TOMORROW=$(date --date="next day" '+%Y-%m-%d') + $ echo "Hour,Price + $ ${TOMORROW}T00:00:00,0.0 + $ ${TOMORROW}T01:00:00,0.0 + $ ${TOMORROW}T02:00:00,0.0 + $ ${TOMORROW}T03:00:00,0.0 + $ ${TOMORROW}T04:00:00,0.01 + $ ${TOMORROW}T05:00:00,0.03 + $ ${TOMORROW}T06:00:00,0.06 + $ ${TOMORROW}T07:00:00,0.1 + $ ${TOMORROW}T08:00:00,0.14 + $ ${TOMORROW}T09:00:00,0.17 + $ ${TOMORROW}T10:00:00,0.19 + $ ${TOMORROW}T11:00:00,0.21 + $ ${TOMORROW}T12:00:00,0.22 + $ ${TOMORROW}T13:00:00,0.21 + $ ${TOMORROW}T14:00:00,0.19 + $ ${TOMORROW}T15:00:00,0.17 + $ ${TOMORROW}T16:00:00,0.14 + $ ${TOMORROW}T17:00:00,0.1 + $ ${TOMORROW}T18:00:00,0.06 + $ ${TOMORROW}T19:00:00,0.03 + $ ${TOMORROW}T20:00:00,0.01 + $ ${TOMORROW}T21:00:00,0.0 + $ ${TOMORROW}T22:00:00,0.0 + $ ${TOMORROW}T23:00:00,0.0" > solar-tomorrow.csv + +Then, we read in the created CSV file as beliefs data. +This time, different to above, we want to use a new data source (not the user) ― it represents whoever is making these solar production forecasts. +We create that data source first, so we can tell `flexmeasures add beliefs` to use it. +Setting the data source type to "forecaster" helps FlexMeasures to visually distinguish its data from e.g. schedules and measurements. + +.. note:: The ``flexmeasures add source`` command also allows to set a model and version, so sources can be distinguished in more detail. But that is not the point of this tutorial. See ``flexmeasures add source --help``. + +.. 
code-block:: bash + + $ flexmeasures add source --name "toy-forecaster" --type forecaster + Added source + $ flexmeasures add beliefs --sensor-id 3 --source 4 solar-tomorrow.csv --timezone Europe/Amsterdam + Successfully created beliefs + +The one-hour CSV data is automatically resampled to the 15-minute resolution of the sensor that is recording solar production. We can see solar production in the `FlexMeasures UI `_ : + +.. image:: https://github.com/FlexMeasures/screenshots/raw/main/tut/toy-schedule/sensor-data-production.png + :align: center +| + +.. note:: The ``flexmeasures add beliefs`` command has many options to make sure the read-in data is correctly interpreted (unit, timezone, delimiter, etc). But that is not the point of this tutorial. See ``flexmeasures add beliefs --help``. + + +Trigger an updated schedule +---------------------------- + +Now, we'll reschedule the battery while taking into account the solar production. This will have an effect on the available headroom for the battery, given the ``capacity_in_mw`` limit discussed earlier. + +.. code-block:: bash + + $ flexmeasures add schedule for-storage --sensor-id 1 --consumption-price-sensor 2 \ + --inflexible-device-sensor 3 \ + --start ${TOMORROW}T07:00+01:00 --duration PT12H \ + --soc-at-start 50% --roundtrip-efficiency 90% + New schedule is stored. + +We can see the updated scheduling in the `FlexMeasures UI `_ : + +.. image:: https://github.com/FlexMeasures/screenshots/raw/main/tut/toy-schedule/sensor-data-charging-with-solar.png + :align: center +| + +The `asset page for the battery `_ now shows the solar data, too: + +.. image:: https://github.com/FlexMeasures/screenshots/raw/main/tut/toy-schedule/asset-view-with-solar.png + :align: center + + +Though this schedule is quite similar, we can see that it has changed from `the one we computed earlier `_ (when we did not take solar into account). 
+ +First, during the sunny hours of the day, when solar power is being sent to the grid, the battery's output (at around 9am and 11am) is now lower, as the battery shares ``capacity_in_mw`` with the solar production. In the evening (around 7pm), when solar power is basically not present anymore, battery discharging to the grid is still at its previous levels. + +Second, charging of the battery is also changed a bit (around 10am), as less can be discharged later. + +We hope this part of the tutorial shows how to incorporate a limited grid connection rather easily with FlexMeasures. There are more ways to model such settings, but this is a straightforward one. \ No newline at end of file diff --git a/documentation/tut/toy-example-from-scratch.rst b/documentation/tut/toy-example-from-scratch.rst index b7531165c..8d7a5c083 100644 --- a/documentation/tut/toy-example-from-scratch.rst +++ b/documentation/tut/toy-example-from-scratch.rst @@ -51,7 +51,7 @@ Install Flexmeasures and the database $ docker run --rm --name flexmeasures-tutorial-fm --env SQLALCHEMY_DATABASE_URI=postgresql://postgres:fm-db-passwd@flexmeasures-tutorial-db:5432/flexmeasures-db --env SECRET_KEY=notsecret --env FLASK_ENV=development --env LOGGING_LEVEL=INFO -d --network=flexmeasures_network -p 5000:5000 lfenergy/flexmeasures $ docker exec flexmeasures-tutorial-fm bash -c "flexmeasures db upgrade" - .. note:: A tip on Linux/macOS ― You might have the ``docker`` command, but need `sudo` rights to execute it. ``alias docker='sudo docker'`` enables you to still run this tutorial. + .. note:: A tip on Linux/macOS ― You might have the ``docker`` command, but need `sudo` rights to execute it. ``alias docker='sudo docker'`` enables you to still run this tutorial. Alternatively, you can add yourself to the ``docker`` group. See `here `_ for more information. Now - what's *very important* to remember is this: The rest of this tutorial will happen *inside* the ``flexmeasures-tutorial-fm`` container!
This is how you hop inside the container and run a terminal there: @@ -281,7 +281,7 @@ Make a schedule Finally, we can create the schedule, which is the main benefit of FlexMeasures (smart real-time control). -We'll ask FlexMeasures for a schedule for our discharging sensor (ID 1). We also need to specify what to optimise against. Here we pass the Id of our market price sensor (3). +We'll ask FlexMeasures for a schedule for our discharging sensor (ID 1). We also need to specify what to optimize against. Here we pass the Id of our market price sensor (3). To keep it short, we'll only ask for a 12-hour window starting at 7am. Finally, the scheduler should know what the state of charge of the battery is when the schedule starts (50%) and what its roundtrip efficiency is (90%). .. code-block:: bash @@ -330,7 +330,7 @@ We can also look at the charging schedule in the `FlexMeasures UI `_ shows both prices and the schedule. @@ -340,83 +340,4 @@ The `asset page for the battery `_ shows both p .. note:: The ``flexmeasures add schedule for-storage`` command also accepts state-of-charge targets, so the schedule can be more sophisticated. But that is not the point of this tutorial. See ``flexmeasures add schedule for-storage --help``. - -Take into account solar production ---------------------------------------- - -So far we haven't taken into account any other devices that consume or produce electricity. We'll now add solar production forecasts and reschedule, to see the effect of solar on the available headroom for the battery. - -First, we'll create a new csv file with solar forecasts (MW, see the setup for sensor 3 above) for tomorrow. - -.. 
code-block:: bash - - $ TOMORROW=$(date --date="next day" '+%Y-%m-%d') - $ echo "Hour,Price - $ ${TOMORROW}T00:00:00,0.0 - $ ${TOMORROW}T01:00:00,0.0 - $ ${TOMORROW}T02:00:00,0.0 - $ ${TOMORROW}T03:00:00,0.0 - $ ${TOMORROW}T04:00:00,0.01 - $ ${TOMORROW}T05:00:00,0.03 - $ ${TOMORROW}T06:00:00,0.06 - $ ${TOMORROW}T07:00:00,0.1 - $ ${TOMORROW}T08:00:00,0.14 - $ ${TOMORROW}T09:00:00,0.17 - $ ${TOMORROW}T10:00:00,0.19 - $ ${TOMORROW}T11:00:00,0.21 - $ ${TOMORROW}T12:00:00,0.22 - $ ${TOMORROW}T13:00:00,0.21 - $ ${TOMORROW}T14:00:00,0.19 - $ ${TOMORROW}T15:00:00,0.17 - $ ${TOMORROW}T16:00:00,0.14 - $ ${TOMORROW}T17:00:00,0.1 - $ ${TOMORROW}T18:00:00,0.06 - $ ${TOMORROW}T19:00:00,0.03 - $ ${TOMORROW}T20:00:00,0.01 - $ ${TOMORROW}T21:00:00,0.0 - $ ${TOMORROW}T22:00:00,0.0 - $ ${TOMORROW}T23:00:00,0.0" > solar-tomorrow.csv - -Then, we read in the created CSV file as beliefs data. -This time, different to above, we want to use a new data source (not the user) ― it represents whoever is making these solar production forecasts. -We create that data source first, so we can tell `flexmeasures add beliefs` to use it. -Setting the data source type to "forecaster" helps FlexMeasures to visualize distinguish its data from e.g. schedules and measurements. - -.. note:: The ``flexmeasures add source`` command also allows to set a model and version, so sources can be distinguished in more detail. But that is not the point of this tutorial. See ``flexmeasures add source --help``. - -.. code-block:: bash - - $ flexmeasures add source --name "toy-forecaster" --type forecaster - Added source - $ flexmeasures add beliefs --sensor-id 3 --source 4 solar-tomorrow.csv --timezone Europe/Amsterdam - Successfully created beliefs - -The one-hour CSV data is automatically resampled to the 15-minute resolution of the sensor that is recording solar production. We can see solar production in the `FlexMeasures UI `_ : - -.. 
image:: https://github.com/FlexMeasures/screenshots/raw/main/tut/toy-schedule/sensor-data-production.png - :align: center -| - -.. note:: The ``flexmeasures add beliefs`` command has many options to make sure the read-in data is correctly interpreted (unit, timezone, delimiter, etc). But that is not the point of this tutorial. See ``flexmeasures add beliefs --help``. - -Now, we'll reschedule the battery while taking into account the solar production. This will have an effect on the available headroom for the battery. - -.. code-block:: bash - - $ flexmeasures add schedule for-storage --sensor-id 1 --consumption-price-sensor 2 \ - --inflexible-device-sensor 3 \ - --start ${TOMORROW}T07:00+01:00 --duration PT12H \ - --soc-at-start 50% --roundtrip-efficiency 90% - New schedule is stored. - -We can see the updated scheduling in the `FlexMeasures UI `_ : - -.. image:: https://github.com/FlexMeasures/screenshots/raw/main/tut/toy-schedule/sensor-data-charging-with-solar.png - :align: center -| - -The `asset page for the battery `_ now shows the solar data, too. - -.. image:: https://github.com/FlexMeasures/screenshots/raw/main/tut/toy-schedule/asset-view-with-solar.png - :align: center -| +This tutorial showed the fastest way to a schedule. In :ref:`tut_toy_schedule_expanded`, we'll go further into settings with more realistic ingredients: solar panels and a limited grid connection. \ No newline at end of file diff --git a/flexmeasures/api/__init__.py b/flexmeasures/api/__init__.py index bf8075017..cd536fd29 100644 --- a/flexmeasures/api/__init__.py +++ b/flexmeasures/api/__init__.py @@ -1,3 +1,7 @@ +""" +FlexMeasures API routes and implementations. 
+""" + from flask import Flask, Blueprint, request from flask_security.utils import verify_password from flask_json import as_json @@ -105,18 +109,10 @@ def register_at(app: Flask): play_register_at(app) # Load all versions of the API functionality - from flexmeasures.api.v1 import register_at as v1_register_at - from flexmeasures.api.v1_1 import register_at as v1_1_register_at - from flexmeasures.api.v1_2 import register_at as v1_2_register_at - from flexmeasures.api.v1_3 import register_at as v1_3_register_at - from flexmeasures.api.v2_0 import register_at as v2_0_register_at from flexmeasures.api.v3_0 import register_at as v3_0_register_at from flexmeasures.api.dev import register_at as dev_register_at + from flexmeasures.api.sunset import register_at as sunset_register_at - v1_register_at(app) - v1_1_register_at(app) - v1_2_register_at(app) - v1_3_register_at(app) - v2_0_register_at(app) v3_0_register_at(app) dev_register_at(app) + sunset_register_at(app) diff --git a/flexmeasures/api/common/__init__.py b/flexmeasures/api/common/__init__.py index b9ba8d904..a73eeef17 100644 --- a/flexmeasures/api/common/__init__.py +++ b/flexmeasures/api/common/__init__.py @@ -1,3 +1,7 @@ +""" +Functionality common to all API versions. +""" + from flask import Flask, Blueprint diff --git a/flexmeasures/api/common/utils/deprecation_utils.py b/flexmeasures/api/common/utils/deprecation_utils.py index 2a21277d3..bfe02f69d 100644 --- a/flexmeasures/api/common/utils/deprecation_utils.py +++ b/flexmeasures/api/common/utils/deprecation_utils.py @@ -11,39 +11,40 @@ def sunset_blueprint( blueprint, - api_version_sunset: str, + api_version_being_sunset: str, sunset_link: str, api_version_upgrade_to: str = "3.0", - blueprint_contents_removed: bool = True, + rollback_possible: bool = True, + **kwargs, ): """Sunsets every route on a blueprint by returning 410 (Gone) responses, if sunset is active. 
Whether the sunset is active can be toggled using the config setting "FLEXMEASURES_API_SUNSET_ACTIVE". - If inactive, either: - - return 404 (Not Found) if the blueprint contents have been removed, or - - pass the request to be handled by the endpoint implementation. + If the sunset is inactive, this function will not affect any requests in this blueprint. + If the endpoint implementations have been removed, set rollback_possible=False. Errors will be logged by utils.error_utils.error_handling_router. """ - def let_host_switch_to_returning_410(): + def return_410_unless_host_rolls_back_sunrise(): - # Override with custom info link, if set by host - _sunset_link = override_from_config(sunset_link, "FLEXMEASURES_API_SUNSET_LINK") + if ( + rollback_possible + and not current_app.config["FLEXMEASURES_API_SUNSET_ACTIVE"] + ): + # Sunset is inactive and blueprint contents should still be there, + # so we let the request pass to the endpoint implementation + pass + else: + # Override with custom info link, if set by host + link = override_from_config(sunset_link, "FLEXMEASURES_API_SUNSET_LINK") - if current_app.config["FLEXMEASURES_API_SUNSET_ACTIVE"]: abort( 410, - f"API version {api_version_sunset} has been sunset. Please upgrade to API version {api_version_upgrade_to}. See {_sunset_link} for more information.", + f"API version {api_version_being_sunset} has been sunset. Please upgrade to API version {api_version_upgrade_to}. 
See {link} for more information.", ) - elif blueprint_contents_removed: - abort(404) - else: - # Sunset is inactive and blueprint contents are still there, - # so we let the request pass to the endpoint implementation - pass - blueprint.before_request(let_host_switch_to_returning_410) + blueprint.before_request(return_410_unless_host_rolls_back_sunrise) def deprecate_fields( @@ -128,6 +129,7 @@ def deprecate_blueprint( deprecation_link: str | None = None, sunset_date: pd.Timestamp | str | None = None, sunset_link: str | None = None, + **kwargs, ): """Deprecates every route on a blueprint by adding the "Deprecation" header with a deprecation date. diff --git a/flexmeasures/api/dev/__init__.py b/flexmeasures/api/dev/__init__.py index fbde80d1a..cfe8bbf00 100644 --- a/flexmeasures/api/dev/__init__.py +++ b/flexmeasures/api/dev/__init__.py @@ -1,3 +1,7 @@ +""" +Endpoints under development. Use at your own risk. +""" + from flask import Flask diff --git a/flexmeasures/api/play/__init__.py b/flexmeasures/api/play/__init__.py index 1eb3a8144..5ef843727 100644 --- a/flexmeasures/api/play/__init__.py +++ b/flexmeasures/api/play/__init__.py @@ -1,3 +1,7 @@ +""" +Endpoints to support "play" mode, data restoration +""" + from flask import Flask, Blueprint # The api blueprint. It is registered with the Flask app (see app.py) diff --git a/flexmeasures/api/sunset/__init__.py b/flexmeasures/api/sunset/__init__.py new file mode 100644 index 000000000..dc45232bd --- /dev/null +++ b/flexmeasures/api/sunset/__init__.py @@ -0,0 +1,77 @@ +""" +A place to keep all routes to endpoints that previously existed and are now sunset. +""" + +from flask import Flask, Blueprint + +from flexmeasures.api.common.utils.deprecation_utils import ( + deprecate_blueprint, + sunset_blueprint, +) + + +# The sunset API blueprints. 
They are registered with the Flask app (see register_at) +flexmeasures_api_v1 = Blueprint("flexmeasures_api_v1", __name__) +flexmeasures_api_v1_1 = Blueprint("flexmeasures_api_v1_1", __name__) +flexmeasures_api_v1_2 = Blueprint("flexmeasures_api_v1_2", __name__) +flexmeasures_api_v1_3 = Blueprint("flexmeasures_api_v1_3", __name__) +flexmeasures_api_v2_0 = Blueprint("flexmeasures_api_v2_0", __name__) + +SUNSET_INFO = [ + dict( + blueprint=flexmeasures_api_v1, + api_version_being_sunset="1.0", + deprecation_date="2022-12-14", + deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/introduction.html#deprecation-and-sunset", + sunset_date="2023-05-01", + sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1.html", + ), + dict( + blueprint=flexmeasures_api_v1_1, + api_version_being_sunset="1.1", + deprecation_date="2022-12-14", + deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/introduction.html#deprecation-and-sunset", + sunset_date="2023-05-01", + sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_1.html", + ), + dict( + blueprint=flexmeasures_api_v1_2, + api_version_being_sunset="1.2", + deprecation_date="2022-12-14", + deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/introduction.html#deprecation-and-sunset", + sunset_date="2023-05-01", + sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_2.html", + ), + dict( + blueprint=flexmeasures_api_v1_3, + api_version_being_sunset="1.3", + deprecation_date="2022-12-14", + deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/introduction.html#deprecation-and-sunset", + sunset_date="2023-05-01", + sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_3.html", + ), + dict( + blueprint=flexmeasures_api_v2_0, + api_version_being_sunset="2.0", + deprecation_date="2022-12-14", + deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/introduction.html#deprecation-and-sunset", + 
sunset_date="2023-05-01", + sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v2_0.html", + ), +] + +for info in SUNSET_INFO: + deprecate_blueprint(**info) + sunset_blueprint(**info, rollback_possible=False) + + +def register_at(app: Flask): + """This can be used to register this blueprint together with other api-related things""" + + import flexmeasures.api.sunset.routes # noqa: F401 this is necessary to load the endpoints + + app.register_blueprint(flexmeasures_api_v1, url_prefix="/api/v1") + app.register_blueprint(flexmeasures_api_v1_1, url_prefix="/api/v1_1") + app.register_blueprint(flexmeasures_api_v1_2, url_prefix="/api/v1_2") + app.register_blueprint(flexmeasures_api_v1_3, url_prefix="/api/v1_3") + app.register_blueprint(flexmeasures_api_v2_0, url_prefix="/api/v2_0") diff --git a/flexmeasures/api/sunset/routes.py b/flexmeasures/api/sunset/routes.py new file mode 100644 index 000000000..0ced0c3a0 --- /dev/null +++ b/flexmeasures/api/sunset/routes.py @@ -0,0 +1,61 @@ +from flexmeasures.api.sunset import ( + flexmeasures_api_v1, + flexmeasures_api_v1_1, + flexmeasures_api_v1_2, + flexmeasures_api_v1_3, + flexmeasures_api_v2_0, +) + + +@flexmeasures_api_v1.route("/getMeterData", methods=["GET", "POST"]) +@flexmeasures_api_v1.route("/postMeterData", methods=["POST"]) +@flexmeasures_api_v1.route("/getService", methods=["GET"]) +@flexmeasures_api_v1_1.route("/getConnection", methods=["GET"]) +@flexmeasures_api_v1_1.route("/postPriceData", methods=["POST"]) +@flexmeasures_api_v1_1.route("/postWeatherData", methods=["POST"]) +@flexmeasures_api_v1_1.route("/getPrognosis", methods=["GET"]) +@flexmeasures_api_v1_1.route("/postPrognosis", methods=["POST"]) +@flexmeasures_api_v1_1.route("/getMeterData", methods=["GET"]) +@flexmeasures_api_v1_1.route("/postMeterData", methods=["POST"]) +@flexmeasures_api_v1_1.route("/getService", methods=["GET"]) +@flexmeasures_api_v1_2.route("/getDeviceMessage", methods=["GET"]) 
+@flexmeasures_api_v1_2.route("/postUdiEvent", methods=["POST"]) +@flexmeasures_api_v1_2.route("/getConnection", methods=["GET"]) +@flexmeasures_api_v1_2.route("/postPriceData", methods=["POST"]) +@flexmeasures_api_v1_2.route("/postWeatherData", methods=["POST"]) +@flexmeasures_api_v1_2.route("/getPrognosis", methods=["GET"]) +@flexmeasures_api_v1_2.route("/getMeterData", methods=["GET"]) +@flexmeasures_api_v1_2.route("/postMeterData", methods=["POST"]) +@flexmeasures_api_v1_2.route("/postPrognosis", methods=["POST"]) +@flexmeasures_api_v1_2.route("/getService", methods=["GET"]) +@flexmeasures_api_v1_3.route("/getDeviceMessage", methods=["GET"]) +@flexmeasures_api_v1_3.route("/postUdiEvent", methods=["POST"]) +@flexmeasures_api_v1_3.route("/getConnection", methods=["GET"]) +@flexmeasures_api_v1_3.route("/postPriceData", methods=["POST"]) +@flexmeasures_api_v1_3.route("/postWeatherData", methods=["POST"]) +@flexmeasures_api_v1_3.route("/getPrognosis", methods=["GET"]) +@flexmeasures_api_v1_3.route("/getMeterData", methods=["GET"]) +@flexmeasures_api_v1_3.route("/postMeterData", methods=["POST"]) +@flexmeasures_api_v1_3.route("/postPrognosis", methods=["POST"]) +@flexmeasures_api_v1_3.route("/getService", methods=["GET"]) +@flexmeasures_api_v2_0.route("/assets", methods=["GET"]) +@flexmeasures_api_v2_0.route("/assets", methods=["POST"]) +@flexmeasures_api_v2_0.route("/asset/", methods=["GET"]) +@flexmeasures_api_v2_0.route("/asset/", methods=["PATCH"]) +@flexmeasures_api_v2_0.route("/asset/", methods=["DELETE"]) +@flexmeasures_api_v2_0.route("/users", methods=["GET"]) +@flexmeasures_api_v2_0.route("/user/", methods=["GET"]) +@flexmeasures_api_v2_0.route("/user/", methods=["PATCH"]) +@flexmeasures_api_v2_0.route("/user//password-reset", methods=["PATCH"]) +@flexmeasures_api_v2_0.route("/getConnection", methods=["GET"]) +@flexmeasures_api_v2_0.route("/postPriceData", methods=["POST"]) +@flexmeasures_api_v2_0.route("/postWeatherData", methods=["POST"]) 
+@flexmeasures_api_v2_0.route("/getPrognosis", methods=["GET"]) +@flexmeasures_api_v2_0.route("/getMeterData", methods=["GET"]) +@flexmeasures_api_v2_0.route("/postMeterData", methods=["POST"]) +@flexmeasures_api_v2_0.route("/postPrognosis", methods=["POST"]) +@flexmeasures_api_v2_0.route("/getService", methods=["GET"]) +@flexmeasures_api_v2_0.route("/getDeviceMessage", methods=["GET"]) +@flexmeasures_api_v2_0.route("/postUdiEvent", methods=["POST"]) +def implementation_gone(): + pass diff --git a/flexmeasures/config/__init__.py b/flexmeasures/api/sunset/tests/__init__.py similarity index 100% rename from flexmeasures/config/__init__.py rename to flexmeasures/api/sunset/tests/__init__.py diff --git a/flexmeasures/api/sunset/tests/test_sunset.py b/flexmeasures/api/sunset/tests/test_sunset.py new file mode 100644 index 000000000..9d87264f6 --- /dev/null +++ b/flexmeasures/api/sunset/tests/test_sunset.py @@ -0,0 +1,22 @@ +import pytest + +from flask import url_for + + +@pytest.mark.parametrize( + "blueprint, api_version_being_sunset", + [ + ("flexmeasures_api_v1", "1.0"), + ("flexmeasures_api_v1_1", "1.1"), + ("flexmeasures_api_v1_2", "1.2"), + ("flexmeasures_api_v1_3", "1.3"), + ("flexmeasures_api_v2_0", "2.0"), + ], +) +def test_sunset(client, blueprint, api_version_being_sunset): + gone = client.get(url_for(f"{blueprint}.implementation_gone")) + assert gone.status_code == 410 + assert ( + f"API version {api_version_being_sunset} has been sunset" + in gone.json["message"] + ) diff --git a/flexmeasures/api/v1/__init__.py b/flexmeasures/api/v1/__init__.py deleted file mode 100644 index 0fdf4d196..000000000 --- a/flexmeasures/api/v1/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -from flask import Flask, Blueprint - -from flexmeasures.api.common.utils.deprecation_utils import ( - deprecate_blueprint, - sunset_blueprint, -) - - -# The api blueprint. 
It is registered with the Flask app (see register_at) -flexmeasures_api = Blueprint("flexmeasures_api_v1", __name__) -deprecate_blueprint( - flexmeasures_api, - deprecation_date="2022-12-14", - deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/introduction.html#deprecation-and-sunset", - sunset_date="2023-05-01", - sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1.html", -) -sunset_blueprint( - flexmeasures_api, - "1.0", - "https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1.html", -) - - -def register_at(app: Flask): - """This can be used to register this blueprint together with other api-related things""" - - import flexmeasures.api.v1.routes # noqa: F401 this is necessary to load the endpoints - - app.register_blueprint(flexmeasures_api, url_prefix="/api/v1") diff --git a/flexmeasures/api/v1/routes.py b/flexmeasures/api/v1/routes.py deleted file mode 100644 index ce26bfc98..000000000 --- a/flexmeasures/api/v1/routes.py +++ /dev/null @@ -1,16 +0,0 @@ -from flexmeasures.api.v1 import flexmeasures_api as flexmeasures_api_v1 - - -@flexmeasures_api_v1.route("/getMeterData", methods=["GET", "POST"]) -def get_meter_data(): - pass - - -@flexmeasures_api_v1.route("/postMeterData", methods=["POST"]) -def post_meter_data(): - pass - - -@flexmeasures_api_v1.route("/getService", methods=["GET"]) -def get_service(): - pass diff --git a/flexmeasures/api/v1_1/__init__.py b/flexmeasures/api/v1_1/__init__.py deleted file mode 100644 index 774808627..000000000 --- a/flexmeasures/api/v1_1/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -from flask import Flask, Blueprint - -from flexmeasures.api.common.utils.deprecation_utils import ( - deprecate_blueprint, - sunset_blueprint, -) - -# The api blueprint. 
It is registered with the Flask app (see app.py) -flexmeasures_api = Blueprint("flexmeasures_api_v1_1", __name__) -deprecate_blueprint( - flexmeasures_api, - deprecation_date="2022-12-14", - deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/v1_1.html", - sunset_date="2023-05-01", - sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_1.html", -) -sunset_blueprint( - flexmeasures_api, - "1.1", - "https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_1.html", -) - - -def register_at(app: Flask): - """This can be used to register this blueprint together with other api-related things""" - - import flexmeasures.api.v1_1.routes # noqa: F401 this is necessary to load the endpoints - - app.register_blueprint(flexmeasures_api, url_prefix="/api/v1_1") diff --git a/flexmeasures/api/v1_1/routes.py b/flexmeasures/api/v1_1/routes.py deleted file mode 100644 index 9066ff68d..000000000 --- a/flexmeasures/api/v1_1/routes.py +++ /dev/null @@ -1,41 +0,0 @@ -from flexmeasures.api.v1_1 import flexmeasures_api as flexmeasures_api_v1_1 - - -@flexmeasures_api_v1_1.route("/getConnection", methods=["GET"]) -def get_connection(): - pass - - -@flexmeasures_api_v1_1.route("/postPriceData", methods=["POST"]) -def post_price_data(): - pass - - -@flexmeasures_api_v1_1.route("/postWeatherData", methods=["POST"]) -def post_weather_data(): - pass - - -@flexmeasures_api_v1_1.route("/getPrognosis", methods=["GET"]) -def get_prognosis(): - pass - - -@flexmeasures_api_v1_1.route("/postPrognosis", methods=["POST"]) -def post_prognosis(): - pass - - -@flexmeasures_api_v1_1.route("/getMeterData", methods=["GET"]) -def get_meter_data(): - pass - - -@flexmeasures_api_v1_1.route("/postMeterData", methods=["POST"]) -def post_meter_data(): - pass - - -@flexmeasures_api_v1_1.route("/getService", methods=["GET"]) -def get_service(): - pass diff --git a/flexmeasures/api/v1_2/__init__.py b/flexmeasures/api/v1_2/__init__.py deleted file mode 100644 index 09c18def0..000000000 --- 
a/flexmeasures/api/v1_2/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -from flask import Flask, Blueprint - -from flexmeasures.api.common.utils.deprecation_utils import ( - deprecate_blueprint, - sunset_blueprint, -) - -# The api blueprint. It is registered with the Flask app (see app.py) -flexmeasures_api = Blueprint("flexmeasures_api_v1_2", __name__) -deprecate_blueprint( - flexmeasures_api, - deprecation_date="2022-12-14", - deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/v1_2.html", - sunset_date="2023-05-01", - sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_2.html", -) -sunset_blueprint( - flexmeasures_api, - "1.2", - "https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_2.html", -) - - -def register_at(app: Flask): - """This can be used to register this blueprint together with other api-related things""" - - import flexmeasures.api.v1_2.routes # noqa: F401 this is necessary to load the endpoints - - app.register_blueprint(flexmeasures_api, url_prefix="/api/v1_2") diff --git a/flexmeasures/api/v1_2/routes.py b/flexmeasures/api/v1_2/routes.py deleted file mode 100644 index b4845c24e..000000000 --- a/flexmeasures/api/v1_2/routes.py +++ /dev/null @@ -1,51 +0,0 @@ -from flexmeasures.api.v1_2 import flexmeasures_api as flexmeasures_api_v1_2 - - -@flexmeasures_api_v1_2.route("/getDeviceMessage", methods=["GET"]) -def get_device_message(): - pass - - -@flexmeasures_api_v1_2.route("/postUdiEvent", methods=["POST"]) -def post_udi_event(): - pass - - -@flexmeasures_api_v1_2.route("/getConnection", methods=["GET"]) -def get_connection(): - pass - - -@flexmeasures_api_v1_2.route("/postPriceData", methods=["POST"]) -def post_price_data(): - pass - - -@flexmeasures_api_v1_2.route("/postWeatherData", methods=["POST"]) -def post_weather_data(): - pass - - -@flexmeasures_api_v1_2.route("/getPrognosis", methods=["GET"]) -def get_prognosis(): - pass - - -@flexmeasures_api_v1_2.route("/getMeterData", methods=["GET"]) -def get_meter_data(): - 
pass - - -@flexmeasures_api_v1_2.route("/postMeterData", methods=["POST"]) -def post_meter_data(): - pass - - -@flexmeasures_api_v1_2.route("/postPrognosis", methods=["POST"]) -def post_prognosis(): - pass - - -@flexmeasures_api_v1_2.route("/getService", methods=["GET"]) -def get_service(): - pass diff --git a/flexmeasures/api/v1_3/__init__.py b/flexmeasures/api/v1_3/__init__.py deleted file mode 100644 index ce6308d9f..000000000 --- a/flexmeasures/api/v1_3/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -from flask import Flask, Blueprint - -from flexmeasures.api.common.utils.deprecation_utils import ( - deprecate_blueprint, - sunset_blueprint, -) - -# The api blueprint. It is registered with the Flask app (see app.py) -flexmeasures_api = Blueprint("flexmeasures_api_v1_3", __name__) -deprecate_blueprint( - flexmeasures_api, - deprecation_date="2022-12-14", - deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/v1_3.html", - sunset_date="2023-05-01", - sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_3.html", -) -sunset_blueprint( - flexmeasures_api, - "1.3", - "https://flexmeasures.readthedocs.io/en/v0.13.0/api/v1_3.html", -) - - -def register_at(app: Flask): - """This can be used to register this blueprint together with other api-related things""" - - import flexmeasures.api.v1_3.routes # noqa: F401 this is necessary to load the endpoints - - app.register_blueprint(flexmeasures_api, url_prefix="/api/v1_3") diff --git a/flexmeasures/api/v1_3/routes.py b/flexmeasures/api/v1_3/routes.py deleted file mode 100644 index fad77460c..000000000 --- a/flexmeasures/api/v1_3/routes.py +++ /dev/null @@ -1,51 +0,0 @@ -from flexmeasures.api.v1_3 import flexmeasures_api as flexmeasures_api_v1_3 - - -@flexmeasures_api_v1_3.route("/getDeviceMessage", methods=["GET"]) -def get_device_message(): - pass - - -@flexmeasures_api_v1_3.route("/postUdiEvent", methods=["POST"]) -def post_udi_event(): - pass - - -@flexmeasures_api_v1_3.route("/getConnection", 
methods=["GET"]) -def get_connection(): - pass - - -@flexmeasures_api_v1_3.route("/postPriceData", methods=["POST"]) -def post_price_data(): - pass - - -@flexmeasures_api_v1_3.route("/postWeatherData", methods=["POST"]) -def post_weather_data(): - pass - - -@flexmeasures_api_v1_3.route("/getPrognosis", methods=["GET"]) -def get_prognosis(): - pass - - -@flexmeasures_api_v1_3.route("/getMeterData", methods=["GET"]) -def get_meter_data(): - pass - - -@flexmeasures_api_v1_3.route("/postMeterData", methods=["POST"]) -def post_meter_data(): - pass - - -@flexmeasures_api_v1_3.route("/postPrognosis", methods=["POST"]) -def post_prognosis(): - pass - - -@flexmeasures_api_v1_3.route("/getService", methods=["GET"]) -def get_service(): - pass diff --git a/flexmeasures/api/v2_0/__init__.py b/flexmeasures/api/v2_0/__init__.py deleted file mode 100644 index 3ee0746e7..000000000 --- a/flexmeasures/api/v2_0/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -from flask import Flask, Blueprint - -from flexmeasures.api.common.utils.deprecation_utils import ( - deprecate_blueprint, - sunset_blueprint, -) - -flexmeasures_api = Blueprint("flexmeasures_api_v2_0", __name__) -deprecate_blueprint( - flexmeasures_api, - deprecation_date="2022-12-14", - deprecation_link="https://flexmeasures.readthedocs.io/en/latest/api/v2_0.html", - sunset_date="2023-05-01", - sunset_link="https://flexmeasures.readthedocs.io/en/v0.13.0/api/v2_0.html", -) -sunset_blueprint( - flexmeasures_api, - "2.0", - "https://flexmeasures.readthedocs.io/en/v0.13.0/api/v2_0.html", -) - - -def register_at(app: Flask): - """This can be used to register this blueprint together with other api-related things""" - - import flexmeasures.api.v2_0.routes # noqa: F401 this is necessary to load the endpoints - - v2_0_api_prefix = "/api/v2_0" - - app.register_blueprint(flexmeasures_api, url_prefix=v2_0_api_prefix) diff --git a/flexmeasures/api/v2_0/routes.py b/flexmeasures/api/v2_0/routes.py deleted file mode 100644 index 
88efcce5e..000000000 --- a/flexmeasures/api/v2_0/routes.py +++ /dev/null @@ -1,96 +0,0 @@ -from flexmeasures.api.v2_0 import flexmeasures_api as flexmeasures_api_v2_0 - - -@flexmeasures_api_v2_0.route("/assets", methods=["GET"]) -def get_assets(): - pass - - -@flexmeasures_api_v2_0.route("/assets", methods=["POST"]) -def post_assets(): - pass - - -@flexmeasures_api_v2_0.route("/asset/", methods=["GET"]) -def get_asset(id: int): - pass - - -@flexmeasures_api_v2_0.route("/asset/", methods=["PATCH"]) -def patch_asset(id: int): - pass - - -@flexmeasures_api_v2_0.route("/asset/", methods=["DELETE"]) -def delete_asset(id: int): - pass - - -@flexmeasures_api_v2_0.route("/users", methods=["GET"]) -def get_users(): - pass - - -@flexmeasures_api_v2_0.route("/user/", methods=["GET"]) -def get_user(id: int): - pass - - -@flexmeasures_api_v2_0.route("/user/", methods=["PATCH"]) -def patch_user(id: int): - pass - - -@flexmeasures_api_v2_0.route("/user//password-reset", methods=["PATCH"]) -def reset_user_password(id: int): - pass - - -@flexmeasures_api_v2_0.route("/getConnection", methods=["GET"]) -def get_connection(): - pass - - -@flexmeasures_api_v2_0.route("/postPriceData", methods=["POST"]) -def post_price_data(): - pass - - -@flexmeasures_api_v2_0.route("/postWeatherData", methods=["POST"]) -def post_weather_data(): - pass - - -@flexmeasures_api_v2_0.route("/getPrognosis", methods=["GET"]) -def get_prognosis(): - pass - - -@flexmeasures_api_v2_0.route("/getMeterData", methods=["GET"]) -def get_meter_data(): - pass - - -@flexmeasures_api_v2_0.route("/postMeterData", methods=["POST"]) -def post_meter_data(): - pass - - -@flexmeasures_api_v2_0.route("/postPrognosis", methods=["POST"]) -def post_prognosis(): - pass - - -@flexmeasures_api_v2_0.route("/getService", methods=["GET"]) -def get_service(): - pass - - -@flexmeasures_api_v2_0.route("/getDeviceMessage", methods=["GET"]) -def get_device_message(): - pass - - -@flexmeasures_api_v2_0.route("/postUdiEvent", methods=["POST"]) 
-def post_udi_event(): - pass diff --git a/flexmeasures/api/v3_0/__init__.py b/flexmeasures/api/v3_0/__init__.py index 5b2854fa6..42800be61 100644 --- a/flexmeasures/api/v3_0/__init__.py +++ b/flexmeasures/api/v3_0/__init__.py @@ -1,3 +1,7 @@ +""" +FlexMeasures API v3 +""" + from flask import Flask from flexmeasures.api.v3_0.sensors import SensorAPI diff --git a/flexmeasures/api/v3_0/health.py b/flexmeasures/api/v3_0/health.py index cff0db8e3..cf2909d6b 100644 --- a/flexmeasures/api/v3_0/health.py +++ b/flexmeasures/api/v3_0/health.py @@ -2,6 +2,8 @@ from flask_classful import FlaskView, route from flask_json import as_json + +from redis.exceptions import ConnectionError from flexmeasures.data import db @@ -14,6 +16,18 @@ def _check_sql_database(): return False +def _check_redis() -> bool: + """Check status of the redis instance + + :return: True if the redis instance is active, False otherwise + """ + try: + current_app.redis_connection.ping() + return True + except ConnectionError: + return False + + class HealthAPI(FlaskView): route_base = "/health" @@ -32,11 +46,19 @@ def is_ready(self): .. 
sourcecode:: json { - 'database_sql': True + 'database_sql': True, + 'database_redis': False } """ - status = {"database_sql": _check_sql_database()} # TODO: check redis + + status = { + "database_sql": _check_sql_database(), + } + + if current_app.config.get("FLEXMEASURES_REDIS_PASSWORD") is not None: + status["database_redis"] = _check_redis() + if all(status.values()): return status, 200 else: diff --git a/flexmeasures/api/v3_0/sensors.py b/flexmeasures/api/v3_0/sensors.py index 405bcdec5..be2164fb8 100644 --- a/flexmeasures/api/v3_0/sensors.py +++ b/flexmeasures/api/v3_0/sensors.py @@ -46,6 +46,7 @@ get_sensor_schema = GetSensorDataSchema() post_sensor_schema = PostSensorDataSchema() sensors_schema = SensorSchema(many=True) +sensor_schema = SensorSchema() class SensorAPI(FlaskView): @@ -273,6 +274,7 @@ def trigger_schedule( # noqa: C901 To guarantee a minimum SOC in the period prior to 4.00pm, local minima constraints are imposed (via soc-minima) at 2.00pm and 3.00pm, for 15kWh and 20kWh, respectively. Roundtrip efficiency for use in scheduling is set to 98%. + Storage efficiency is set to 99.99%, denoting the state of charge left after each time step equal to the sensor's resolution. 
Aggregate consumption (of all devices within this EMS) should be priced by sensor 9, and aggregate production should be priced by sensor 10, where the aggregate power flow in the EMS is described by the sum over sensors 13, 14 and 15 @@ -306,6 +308,7 @@ def trigger_schedule( # noqa: C901 "soc-min": 10, "soc-max": 25, "roundtrip-efficiency": 0.98, + "storage-efficiency": 0.9999, }, "flex-context": { "consumption-price-sensor": 9, @@ -492,3 +495,38 @@ def get_schedule(self, sensor: Sensor, job_id: str, duration: timedelta, **kwarg d, s = request_processed() return dict(**response, **d), s + + @route("/", methods=["GET"]) + @use_kwargs({"sensor": SensorIdField(data_key="id")}, location="path") + @permission_required_for_context("read", arg_name="sensor") + @as_json + def fetch_one(self, id, sensor): + """Fetch a given sensor. + + .. :quickref: Sensor; Get a sensor + + This endpoint gets a sensor. + + **Example response** + + .. sourcecode:: json + + { + "name": "some gas sensor", + "unit": "m³/h", + "entity_address": "ea1.2023-08.localhost:fm1.1", + "event_resolution": 10, + "generic_asset_id": 4, + "timezone": "UTC", + } + + :reqheader Authorization: The authentication token + :reqheader Content-Type: application/json + :resheader Content-Type: application/json + :status 200: PROCESSED + :status 400: INVALID_REQUEST, REQUIRED_INFO_MISSING, UNEXPECTED_PARAMS + :status 401: UNAUTHORIZED + :status 403: INVALID_SENDER + :status 422: UNPROCESSABLE_ENTITY + """ + return sensor_schema.dump(sensor), 200 diff --git a/flexmeasures/api/v3_0/tests/test_sensor_schedules.py b/flexmeasures/api/v3_0/tests/test_sensor_schedules.py index 53891c677..b2ab198f0 100644 --- a/flexmeasures/api/v3_0/tests/test_sensor_schedules.py +++ b/flexmeasures/api/v3_0/tests/test_sensor_schedules.py @@ -221,6 +221,9 @@ def test_trigger_and_get_schedule( roundtrip_efficiency = ( float(message["roundtrip-efficiency"].replace("%", "")) / 100.0 ) + storage_efficiency = ( + 
float(message["storage-efficiency"].replace("%", "")) / 100.0 + ) soc_targets = message.get("soc-targets") else: start_soc = message["flex-model"]["soc-at-start"] / 1000 # in MWh @@ -228,6 +231,9 @@ def test_trigger_and_get_schedule( float(message["flex-model"]["roundtrip-efficiency"].replace("%", "")) / 100.0 ) + storage_efficiency = ( + float(message["flex-model"]["storage-efficiency"].replace("%", "")) / 100.0 + ) soc_targets = message["flex-model"].get("soc-targets") resolution = sensor.event_resolution if soc_targets: @@ -271,6 +277,7 @@ def test_trigger_and_get_schedule( start_soc, up_efficiency=roundtrip_efficiency**0.5, down_efficiency=roundtrip_efficiency**0.5, + storage_efficiency=storage_efficiency, decimal_precision=6, ) print(consumption_schedule) diff --git a/flexmeasures/api/v3_0/tests/test_sensors_api.py b/flexmeasures/api/v3_0/tests/test_sensors_api.py new file mode 100644 index 000000000..b57fcfd83 --- /dev/null +++ b/flexmeasures/api/v3_0/tests/test_sensors_api.py @@ -0,0 +1,33 @@ +from __future__ import annotations + + +from flask import url_for + + +from flexmeasures import Sensor +from flexmeasures.api.tests.utils import get_auth_token + + +def test_fetch_one_sensor( + client, + setup_api_test_data: dict[str, Sensor], +): + sensor_id = 1 + headers = make_headers_for("test_supplier_user_4@seita.nl", client) + response = client.get( + url_for("SensorAPI:fetch_one", id=sensor_id), + headers=headers, + ) + print("Server responded with:\n%s" % response.json) + assert response.status_code == 200 + assert response.json["name"] == "some gas sensor" + assert response.json["unit"] == "m³/h" + assert response.json["generic_asset_id"] == 4 + assert response.json["timezone"] == "UTC" + + +def make_headers_for(user_email: str | None, client) -> dict: + headers = {"content-type": "application/json"} + if user_email: + headers["Authorization"] = get_auth_token(client, user_email, "testtest") + return headers diff --git a/flexmeasures/api/v3_0/tests/utils.py 
b/flexmeasures/api/v3_0/tests/utils.py index 2ab606a4e..41d4e8e5a 100644 --- a/flexmeasures/api/v3_0/tests/utils.py +++ b/flexmeasures/api/v3_0/tests/utils.py @@ -61,6 +61,7 @@ def message_for_trigger_schedule( "soc-max": 40, # in kWh, according to soc-unit "soc-unit": "kWh", "roundtrip-efficiency": "98%", + "storage-efficiency": "99.99%", } if with_targets: if realistic_targets: diff --git a/flexmeasures/app.py b/flexmeasures/app.py index 70f1c89cd..b26c0aad3 100644 --- a/flexmeasures/app.py +++ b/flexmeasures/app.py @@ -1,6 +1,13 @@ +""" +Starting point of the Flask application. +""" + from __future__ import annotations import time +import os +from pathlib import Path +from datetime import date from flask import Flask, g, request from flask.cli import load_dotenv @@ -66,7 +73,9 @@ def create( # noqa C901 if app.testing: from fakeredis import FakeStrictRedis - redis_conn = FakeStrictRedis() + redis_conn = FakeStrictRedis( + host="redis", port="1234" + ) # dummy connection details else: redis_conn = Redis( app.config["FLEXMEASURES_REDIS_URL"], @@ -93,17 +102,29 @@ def create( # noqa C901 if app.env not in ("documentation", "development"): SSLify(app) + # Prepare profiling, if needed + + if app.config.get("FLEXMEASURES_PROFILE_REQUESTS", False): + Path("profile_reports").mkdir(parents=True, exist_ok=True) + try: + import pyinstrument # noqa F401 + except ImportError: + app.logger.warning( + "FLEXMEASURES_PROFILE_REQUESTS is True, but pyinstrument not installed ― I cannot produce profiling reports for requests." 
+ ) + # Register database and models, including user auth security handlers from flexmeasures.data import register_at as register_db_at register_db_at(app) - # Register Reporters + # Register Reporters and Schedulers from flexmeasures.utils.coding_utils import get_classes_module - from flexmeasures.data.models.reporting import Reporter + from flexmeasures.data.models import reporting, planning - app.reporters = get_classes_module("flexmeasures.data.models.reporting", Reporter) + app.reporters = get_classes_module("flexmeasures.data.models", reporting.Reporter) + app.schedulers = get_classes_module("flexmeasures.data.models", planning.Scheduler) # add auth policy @@ -145,6 +166,13 @@ def create( # noqa C901 def before_request(): if app.config.get("FLEXMEASURES_PROFILE_REQUESTS", False): g.start = time.time() + try: + import pyinstrument # noqa F401 + + g.profiler = pyinstrument.Profiler() + g.profiler.start() + except ImportError: + pass @app.teardown_request def teardown_request(exception=None): @@ -154,5 +182,22 @@ def teardown_request(exception=None): app.logger.info( f"[PROFILE] {str(round(diff, 2)).rjust(6)} seconds to serve {request.url}." ) + if not hasattr(g, "profiler"): + return app + g.profiler.stop() + output_html = g.profiler.output_html(timeline=True) + endpoint = request.endpoint + if endpoint is None: + endpoint = "unknown" + today = date.today() + profile_filename = f"pyinstrument_{endpoint}.html" + profile_output_path = Path( + "profile_reports", today.strftime("%Y-%m-%d") + ) + profile_output_path.mkdir(parents=True, exist_ok=True) + with open( + os.path.join(profile_output_path, profile_filename), "w+" + ) as f: + f.write(output_html) return app diff --git a/flexmeasures/auth/__init__.py b/flexmeasures/auth/__init__.py index 1ce426d7a..75f051f81 100644 --- a/flexmeasures/auth/__init__.py +++ b/flexmeasures/auth/__init__.py @@ -1,3 +1,7 @@ +""" +Authentication and authorization policies and helpers. 
+""" + from flask import Flask from flask_security import Security, SQLAlchemySessionUserDatastore from flask_login import user_logged_in, current_user @@ -6,11 +10,6 @@ from flexmeasures.data import db -""" -Configure authentication and authorization. -""" - - def register_at(app: Flask): from flexmeasures.auth.error_handling import ( diff --git a/flexmeasures/auth/decorators.py b/flexmeasures/auth/decorators.py index 7bb6d11ba..d2afbdd08 100644 --- a/flexmeasures/auth/decorators.py +++ b/flexmeasures/auth/decorators.py @@ -1,3 +1,8 @@ +""" +Auth decorators for endpoints +""" + + from __future__ import annotations from typing import Callable diff --git a/flexmeasures/auth/policy.py b/flexmeasures/auth/policy.py index b5c42b5f1..6a2bbaa03 100644 --- a/flexmeasures/auth/policy.py +++ b/flexmeasures/auth/policy.py @@ -1,3 +1,7 @@ +""" +Tooling & docs for implementing our auth policy +""" + from __future__ import annotations # Use | instead of Union, list instead of List and tuple instead of Tuple when FM stops supporting Python 3.9 (because of https://github.com/python/cpython/issues/86399) diff --git a/flexmeasures/cli/__init__.py b/flexmeasures/cli/__init__.py index 63716ee75..09c246f81 100644 --- a/flexmeasures/cli/__init__.py +++ b/flexmeasures/cli/__init__.py @@ -1,3 +1,7 @@ +""" +CLI functions for FlexMeasures hosts. 
+""" + import sys from flask import Flask, current_app diff --git a/flexmeasures/cli/data_add.py b/flexmeasures/cli/data_add.py index e66467bad..cf31026bf 100755 --- a/flexmeasures/cli/data_add.py +++ b/flexmeasures/cli/data_add.py @@ -1,8 +1,11 @@ -"""CLI Tasks for populating the database - most useful in development""" +""" +CLI commands for populating the database +""" + from __future__ import annotations from datetime import datetime, timedelta -from typing import Optional, Type +from typing import Type import json from pathlib import Path from io import TextIOBase @@ -49,6 +52,7 @@ LongitudeField, SensorIdField, ) +from flexmeasures.data.schemas.scheduling.storage import EfficiencyField from flexmeasures.data.schemas.sensors import SensorSchema from flexmeasures.data.schemas.units import QuantityField from flexmeasures.data.schemas.generic_assets import ( @@ -62,7 +66,7 @@ ) from flexmeasures.data.services.utils import get_or_create_model from flexmeasures.utils import flexmeasures_inflection -from flexmeasures.utils.time_utils import server_now, get_timezone, apply_offset_chain +from flexmeasures.utils.time_utils import server_now, apply_offset_chain from flexmeasures.utils.unit_utils import convert_units, ur from flexmeasures.data.utils import save_to_db from flexmeasures.data.models.reporting import Reporter @@ -129,7 +133,12 @@ def new_account(name: str, roles: str): @with_appcontext @click.option("--username", required=True) @click.option("--email", required=True) -@click.option("--account-id", type=int, required=True) +@click.option( + "--account-id", + type=int, + required=True, + help="Add user to this account. Follow up with the account's ID.", +) @click.option("--roles", help="e.g. 
anonymous,Prosumer,CPO") @click.option( "--timezone", @@ -282,7 +291,12 @@ def add_asset_type(**args): type=LongitudeField(), help="Longitude of the asset's location", ) -@click.option("--account-id", type=int, required=True) +@click.option( + "--account-id", + type=int, + required=False, + help="Add asset to this account. Follow up with the account's ID. If not set, the asset will become public (which makes it accessible to all users).", +) @click.option( "--asset-type-id", "generic_asset_type_id", @@ -294,6 +308,11 @@ def add_asset(**args): """Add an asset.""" check_errors(GenericAssetSchema().validate(args)) generic_asset = GenericAsset(**args) + if generic_asset.account_id is None: + click.secho( + "Creating a PUBLIC asset, as no --account-id is given ...", + **MsgStyle.WARN, + ) db.session.add(generic_asset) db.session.commit() click.secho( @@ -315,7 +334,7 @@ def add_initial_structure(): "--name", required=True, type=str, - help="Name of the source (usually an organisation)", + help="Name of the source (usually an organization)", ) @click.option( "--model", @@ -355,7 +374,7 @@ def add_source(name: str, model: str, version: str, source_type: str): "sensor", required=True, type=SensorIdField(), - help="Sensor to which the beliefs pertain.", + help="Record the beliefs under this sensor. Follow up with the sensor's ID. ", ) @click.option( "--source", @@ -1012,11 +1031,22 @@ def create_schedule(ctx): @click.option( "--roundtrip-efficiency", "roundtrip_efficiency", - type=QuantityField("%", validate=validate.Range(min=0, max=1)), + type=EfficiencyField(), required=False, default=1, help="Round-trip efficiency (e.g. 85% or 0.85) to use for the schedule. Defaults to 100% (no losses).", ) +@click.option( + "--storage-efficiency", + "storage_efficiency", + type=EfficiencyField(), + required=False, + default=1, + help="Storage efficiency (e.g. 95% or 0.95) to use for the schedule," + " applied over each time step equal to the sensor resolution." 
+ " For example, a storage efficiency of 99 percent per (absolute) day, for scheduling a 1-hour resolution sensor, should be passed as a storage efficiency of 0.99**(1/24)." + " Defaults to 100% (no losses).", +) @click.option( "--as-job", is_flag=True, @@ -1036,6 +1066,7 @@ def add_schedule_for_storage( soc_min: ur.Quantity | None = None, soc_max: ur.Quantity | None = None, roundtrip_efficiency: ur.Quantity | None = None, + storage_efficiency: ur.Quantity | None = None, as_job: bool = False, ): """Create a new schedule for a storage asset. @@ -1091,6 +1122,8 @@ def add_schedule_for_storage( soc_max = convert_units(soc_max.magnitude, str(soc_max.units), "MWh", capacity=capacity_str) # type: ignore if roundtrip_efficiency is not None: roundtrip_efficiency = roundtrip_efficiency.magnitude / 100.0 + if storage_efficiency is not None: + storage_efficiency = storage_efficiency.magnitude / 100.0 scheduling_kwargs = dict( start=start, @@ -1104,6 +1137,7 @@ def add_schedule_for_storage( "soc-max": soc_max, "soc-unit": "MWh", "roundtrip-efficiency": roundtrip_efficiency, + "storage-efficiency": storage_efficiency, }, flex_context={ "consumption-price-sensor": consumption_price_sensor.id, @@ -1131,7 +1165,7 @@ def add_schedule_for_storage( "sensor", type=SensorIdField(), required=True, - help="ID of the sensor used to save the report." + help="Sensor used to save the report. Follow up with the sensor's ID. " " If needed, use `flexmeasures add sensor` to create a new sensor first.", ) @click.option( @@ -1196,8 +1230,7 @@ def add_schedule_for_storage( "--timezone", "timezone", required=False, - default="UTC", - help="Timezone as string, e.g. 'UTC' or 'Europe/Amsterdam' (defaults to FLEXMEASURES_TIMEZONE config setting)", + help="Timezone as string, e.g. 
'UTC' or 'Europe/Amsterdam' (defaults to the timezone of the sensor used to save the report).", ) @click.option( "--dry-run", @@ -1209,26 +1242,26 @@ def add_report( # noqa: C901 reporter_class: str, sensor: Sensor, reporter_config: TextIOBase, - start: Optional[datetime] = None, - end: Optional[datetime] = None, - start_offset: Optional[str] = None, - end_offset: Optional[str] = None, - resolution: Optional[timedelta] = None, - output_file: Optional[Path] = None, + start: datetime | None = None, + end: datetime | None = None, + start_offset: str | None = None, + end_offset: str | None = None, + resolution: timedelta | None = None, + output_file: Path | None = None, dry_run: bool = False, - timezone: str | pytz.BaseTzInfo = get_timezone(), + timezone: str | None = None, ): """ Create a new report using the Reporter class and save the results to the database or export them as CSV or Excel file. """ - # parse timezone into a BaseTzInfo object - if isinstance(timezone, str): + # compute now in the timezone local to the output sensor + if timezone is not None: check_timezone(timezone) - timezone = pytz.timezone(zone=timezone) - - now = timezone.localize(datetime.now()) + now = pytz.timezone( + zone=timezone if timezone is not None else sensor.timezone + ).localize(datetime.now()) # apply offsets, if provided if start_offset is not None: @@ -1313,10 +1346,10 @@ def add_report( # noqa: C901 "Report computation done, but the report is empty.", **MsgStyle.WARN ) - # save the report it's not running in dry mode + # save the report if it's not running in dry mode if not dry_run: click.echo("Saving report to the database...") - save_to_db(result) + save_to_db(result.dropna()) db.session.commit() click.secho( "Success. 
The report has been saved to the database.", diff --git a/flexmeasures/cli/data_delete.py b/flexmeasures/cli/data_delete.py index da9390bda..fc35c9f08 100644 --- a/flexmeasures/cli/data_delete.py +++ b/flexmeasures/cli/data_delete.py @@ -1,3 +1,7 @@ +""" +CLI commands for removing data +""" + from __future__ import annotations from datetime import timedelta @@ -303,18 +307,26 @@ def delete_nan_beliefs(sensor_id: int | None = None): @with_appcontext @click.option( "--id", - "sensor", + "sensors", type=SensorIdField(), required=True, - help="Delete a single sensor and its (time series) data. Follow up with the sensor's ID.", + multiple=True, + help="Delete a sensor and its (time series) data. Follow up with the sensor's ID. " + "This argument can be given multiple times", ) def delete_sensor( - sensor: Sensor, + sensors: list[Sensor], ): - """Delete a sensor and all beliefs about it.""" - n = TimedBelief.query.filter(TimedBelief.sensor_id == sensor.id).delete() - db.session.delete(sensor) - click.confirm(f"Delete {sensor.__repr__()}, along with {n} beliefs?", abort=True) + """Delete sensors and their (time series) data.""" + n = TimedBelief.query.filter( + TimedBelief.sensor_id.in_(sensor.id for sensor in sensors) + ).delete() + for sensor in sensors: + db.session.delete(sensor) + click.confirm( + f"Delete {', '.join(sensor.__repr__() for sensor in sensors)}, along with {n} beliefs?", + abort=True, + ) db.session.commit() diff --git a/flexmeasures/cli/data_edit.py b/flexmeasures/cli/data_edit.py index a1569d579..63b104fb9 100644 --- a/flexmeasures/cli/data_edit.py +++ b/flexmeasures/cli/data_edit.py @@ -1,3 +1,7 @@ +""" +CLI commands for editing data +""" + from __future__ import annotations from datetime import timedelta @@ -6,6 +10,7 @@ import pandas as pd from flask import current_app as app from flask.cli import with_appcontext +import json from flexmeasures import Sensor from flexmeasures.data import db @@ -74,6 +79,20 @@ def fm_edit_data(): type=int, help="Set 
the attribute to this integer value.", ) +@click.option( + "--list", + "attribute_list_value", + required=False, + type=str, + help="Set the attribute to this list value. Pass a string with a JSON-parse-able list representation, e.g. '[1,\"a\"]'.", +) +@click.option( + "--dict", + "attribute_dict_value", + required=False, + type=str, + help="Set the attribute to this dict value. Pass a string with a JSON-parse-able dict representation, e.g. '{1:\"a\"}'.", +) @click.option( "--null", "attribute_null_value", @@ -91,6 +110,8 @@ def edit_attribute( attribute_bool_value: bool | None = None, attribute_str_value: str | None = None, attribute_int_value: int | None = None, + attribute_list_value: str | None = None, + attribute_dict_value: str | None = None, ): """Edit (or add) an asset attribute or sensor attribute.""" @@ -103,6 +124,8 @@ def edit_attribute( attribute_bool_value=attribute_bool_value, attribute_str_value=attribute_str_value, attribute_int_value=attribute_int_value, + attribute_list_value=attribute_list_value, + attribute_dict_value=attribute_dict_value, attribute_null_value=attribute_null_value, ) @@ -207,13 +230,15 @@ def resample_sensor_data( app.cli.add_command(fm_edit_data) -def parse_attribute_value( +def parse_attribute_value( # noqa: C901 attribute_null_value: bool, attribute_float_value: float | None = None, attribute_bool_value: bool | None = None, attribute_str_value: str | None = None, attribute_int_value: int | None = None, -) -> float | int | bool | str | None: + attribute_list_value: str | None = None, + attribute_dict_value: str | None = None, +) -> float | int | bool | str | list | dict | None: """Parse attribute value.""" if not single_true( [attribute_null_value] @@ -224,6 +249,8 @@ def parse_attribute_value( attribute_bool_value, attribute_str_value, attribute_int_value, + attribute_list_value, + attribute_dict_value, ] ] ): @@ -236,6 +263,22 @@ def parse_attribute_value( return bool(attribute_bool_value) elif attribute_int_value is not 
None: return int(attribute_int_value) + elif attribute_list_value is not None: + try: + val = json.loads(attribute_list_value) + except json.decoder.JSONDecodeError as jde: + raise ValueError(f"Error parsing list value: {jde}") + if not isinstance(val, list): + raise ValueError(f"{val} is not a list.") + return val + elif attribute_dict_value is not None: + try: + val = json.loads(attribute_dict_value) + except json.decoder.JSONDecodeError as jde: + raise ValueError(f"Error parsing dict value: {jde}") + if not isinstance(val, dict): + raise ValueError(f"{val} is not a dict.") + return val return attribute_str_value diff --git a/flexmeasures/cli/data_show.py b/flexmeasures/cli/data_show.py index 594bdcd6a..8b5c86266 100644 --- a/flexmeasures/cli/data_show.py +++ b/flexmeasures/cli/data_show.py @@ -1,4 +1,6 @@ -"""CLI Tasks for listing database contents - most useful in development""" +""" +CLI commands for listing database contents and classes +""" from __future__ import annotations @@ -376,28 +378,48 @@ def plot_beliefs( click.secho("Data saved to file.", **MsgStyle.SUCCESS) -@fm_show_data.command("reporters") -@with_appcontext -def list_reporters(): +def list_items(item_type): """ - Show available reporters. + Show available items of a specific type. """ - click.echo("Reporters:\n") + click.echo(f"{item_type.capitalize()}:\n") click.echo( tabulate( [ ( - reporter_name, - reporter_class.__version__, - reporter_class.__author__, - reporter_class.__module__, + item_name, + item_class.__version__, + item_class.__author__, + item_class.__module__, ) - for reporter_name, reporter_class in app.reporters.items() + for item_name, item_class in getattr(app, item_type).items() ], headers=["name", "version", "author", "module"], ) ) +@fm_show_data.command("reporters") +@with_appcontext +def list_reporters(): + """ + Show available reporters. 
+ """ + + with app.app_context(): + list_items("reporters") + + +@fm_show_data.command("schedulers") +@with_appcontext +def list_schedulers(): + """ + Show available schedulers. + """ + + with app.app_context(): + list_items("schedulers") + + app.cli.add_command(fm_show_data) diff --git a/flexmeasures/cli/db_ops.py b/flexmeasures/cli/db_ops.py index 52f6dee7b..6b7b3dcca 100644 --- a/flexmeasures/cli/db_ops.py +++ b/flexmeasures/cli/db_ops.py @@ -1,4 +1,4 @@ -"""CLI Tasks for saving, resetting, etc of the database""" +"""CLI commands for saving, resetting, etc of the database""" from datetime import datetime import subprocess diff --git a/flexmeasures/cli/jobs.py b/flexmeasures/cli/jobs.py index e0b4af220..59ad5d6b7 100644 --- a/flexmeasures/cli/jobs.py +++ b/flexmeasures/cli/jobs.py @@ -1,3 +1,7 @@ +""" +CLI commands for controlling jobs +""" + from __future__ import annotations import random diff --git a/flexmeasures/cli/monitor.py b/flexmeasures/cli/monitor.py index 604fdb61d..55430b7cf 100644 --- a/flexmeasures/cli/monitor.py +++ b/flexmeasures/cli/monitor.py @@ -1,3 +1,7 @@ +""" +CLI commands for monitoring functionality. +""" + from __future__ import annotations from datetime import datetime, timedelta diff --git a/flexmeasures/cli/utils.py b/flexmeasures/cli/utils.py index 95790049b..8f7cf6677 100644 --- a/flexmeasures/cli/utils.py +++ b/flexmeasures/cli/utils.py @@ -1,3 +1,7 @@ +""" +Utils for FlexMeasures CLI +""" + from __future__ import annotations from typing import Any diff --git a/flexmeasures/config/tests/__init__.py b/flexmeasures/config/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/flexmeasures/data/__init__.py b/flexmeasures/data/__init__.py index c13b5da50..3eb2e24f0 100644 --- a/flexmeasures/data/__init__.py +++ b/flexmeasures/data/__init__.py @@ -1,3 +1,7 @@ +""" +Models & schemata, as well as business logic (queries & services). 
+""" + import os from flask import Flask diff --git a/flexmeasures/data/config.py b/flexmeasures/data/config.py index 26af9d753..99c63e5cc 100644 --- a/flexmeasures/data/config.py +++ b/flexmeasures/data/config.py @@ -1,3 +1,7 @@ +""" +Database configuration utils +""" + from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import MetaData import sqlalchemy as sa diff --git a/flexmeasures/data/migrations/versions/2ac7fb39ce0c_add_attribute_column_to_data_source.py b/flexmeasures/data/migrations/versions/2ac7fb39ce0c_add_attribute_column_to_data_source.py new file mode 100644 index 000000000..8698bc3a5 --- /dev/null +++ b/flexmeasures/data/migrations/versions/2ac7fb39ce0c_add_attribute_column_to_data_source.py @@ -0,0 +1,51 @@ +"""add attribute column to data source + +Revision ID: 2ac7fb39ce0c +Revises: d814c0688ae0 +Create Date: 2023-06-05 23:41:31.788961 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "2ac7fb39ce0c" +down_revision = "d814c0688ae0" +branch_labels = None +depends_on = None + + +def upgrade(): + # add the column `attributes` to the table `data_source` + op.add_column( + "data_source", + sa.Column("attributes", sa.JSON(), nullable=True, default={}), + ) + + # add the column `attributes_hash` to the table `data_source` + op.add_column( + "data_source", + sa.Column("attributes_hash", sa.LargeBinary(length=256), nullable=True), + ) + + # remove previous uniqueness constraint and add a new that takes attributes_hash into account + op.drop_constraint(op.f("data_source_name_key"), "data_source", type_="unique") + op.create_unique_constraint( + "data_source_name_key", + "data_source", + ["name", "user_id", "model", "version", "attributes_hash"], + ) + + +def downgrade(): + + op.drop_constraint("data_source_name_key", "data_source", type_="unique") + op.create_unique_constraint( + "data_source_name_key", + "data_source", + ["name", "user_id", "model", "version"], + ) + + 
op.drop_column("data_source", "attributes") + op.drop_column("data_source", "attributes_hash") diff --git a/flexmeasures/data/models/__init__.py b/flexmeasures/data/models/__init__.py index 0995b0a8c..3cd8eba1f 100644 --- a/flexmeasures/data/models/__init__.py +++ b/flexmeasures/data/models/__init__.py @@ -1,3 +1,7 @@ +""" +Data models for FlexMeasures +""" + # Time resolutions resolutions = ["15T", "1h", "1d", "1w"] diff --git a/flexmeasures/data/models/charts/belief_charts.py b/flexmeasures/data/models/charts/belief_charts.py index 09782470e..f3dc06aed 100644 --- a/flexmeasures/data/models/charts/belief_charts.py +++ b/flexmeasures/data/models/charts/belief_charts.py @@ -29,7 +29,11 @@ def bar_chart( stack=None, **FIELD_DEFINITIONS["event_value"], ) - event_start_field_definition = FIELD_DEFINITIONS["event_start"] + if unit == "%": + event_value_field_definition["scale"] = dict( + domain={"unionWith": [0, 105]}, nice=False + ) + event_start_field_definition = FIELD_DEFINITIONS["event_start"].copy() event_start_field_definition["timeUnit"] = { "unit": "yearmonthdatehoursminutesseconds", "step": sensor.event_resolution.total_seconds(), @@ -90,15 +94,16 @@ def chart_for_multiple_sensors( **override_chart_specs: dict, ): # Determine the shared data resolution + all_shown_sensors = flatten_unique(sensors_to_show) condition = list( sensor.event_resolution - for sensor in flatten_unique(sensors_to_show) + for sensor in all_shown_sensors if sensor.event_resolution > timedelta(0) ) minimum_non_zero_resolution = min(condition) if any(condition) else timedelta(0) # Set up field definition for event starts - event_start_field_definition = FIELD_DEFINITIONS["event_start"] + event_start_field_definition = FIELD_DEFINITIONS["event_start"].copy() event_start_field_definition["timeUnit"] = { "unit": "yearmonthdatehoursminutesseconds", "step": minimum_non_zero_resolution.total_seconds(), @@ -112,6 +117,12 @@ def chart_for_multiple_sensors( ] } + # Set up field definition for 
sensor descriptions + sensor_field_definition = FIELD_DEFINITIONS["sensor_description"].copy() + sensor_field_definition["scale"] = dict( + domain=[sensor.to_dict()["description"] for sensor in all_shown_sensors] + ) + sensors_specs = [] for s in sensors_to_show: # List the sensors that go into one row @@ -132,6 +143,10 @@ def chart_for_multiple_sensors( stack=None, **FIELD_DEFINITIONS["event_value"], ) + if unit == "%": + event_value_field_definition["scale"] = dict( + domain={"unionWith": [0, 105]}, nice=False + ) # Set up shared tooltip shared_tooltip = [ @@ -164,7 +179,10 @@ def chart_for_multiple_sensors( # Draw a line for each sensor (and each source) layers = [ create_line_layer( - row_sensors, event_start_field_definition, event_value_field_definition + row_sensors, + event_start_field_definition, + event_value_field_definition, + sensor_field_definition, ) ] @@ -186,6 +204,7 @@ def chart_for_multiple_sensors( row_sensors, event_start_field_definition, event_value_field_definition, + sensor_field_definition, shared_tooltip, ) ) @@ -269,11 +288,12 @@ def create_line_layer( sensors: list["Sensor"], # noqa F821 event_start_field_definition: dict, event_value_field_definition: dict, + sensor_field_definition: dict, ): event_resolutions = list(set([sensor.event_resolution for sensor in sensors])) - assert ( - len(event_resolutions) == 1 - ), "Sensors shown within one row must share the same event resolution." + assert all(res == timedelta(0) for res in event_resolutions) or all( + res != timedelta(0) for res in event_resolutions + ), "Sensors shown within one row must all be instantaneous (zero event resolution) or all be non-instantatneous (non-zero event resolution)." 
event_resolution = event_resolutions[0] line_layer = { "mark": { @@ -286,7 +306,7 @@ def create_line_layer( "encoding": { "x": event_start_field_definition, "y": event_value_field_definition, - "color": FIELD_DEFINITIONS["sensor_description"], + "color": sensor_field_definition, "strokeDash": { "scale": { # Distinguish forecasters and schedulers by line stroke @@ -309,6 +329,7 @@ def create_circle_layer( sensors: list["Sensor"], # noqa F821 event_start_field_definition: dict, event_value_field_definition: dict, + sensor_field_definition: dict, shared_tooltip: list, ): params = [ @@ -348,7 +369,7 @@ def create_circle_layer( "encoding": { "x": event_start_field_definition, "y": event_value_field_definition, - "color": FIELD_DEFINITIONS["sensor_description"], + "color": sensor_field_definition, "size": { "condition": {"value": "200", "test": {"or": or_conditions}}, "value": "0", diff --git a/flexmeasures/data/models/data_sources.py b/flexmeasures/data/models/data_sources.py index e0524021d..307b008d0 100644 --- a/flexmeasures/data/models/data_sources.py +++ b/flexmeasures/data/models/data_sources.py @@ -1,11 +1,14 @@ from __future__ import annotations -from typing import TYPE_CHECKING +import json +from typing import TYPE_CHECKING, Any +from sqlalchemy.ext.mutable import MutableDict import timely_beliefs as tb from flexmeasures.data import db from flask import current_app +import hashlib if TYPE_CHECKING: @@ -57,7 +60,9 @@ class DataSource(db.Model, tb.BeliefSourceDBMixin): """Each data source is a data-providing entity.""" __tablename__ = "data_source" - __table_args__ = (db.UniqueConstraint("name", "user_id", "model", "version"),) + __table_args__ = ( + db.UniqueConstraint("name", "user_id", "model", "version", "attributes_hash"), + ) # The type of data source (e.g. 
user, forecaster or scheduler) type = db.Column(db.String(80), default="") @@ -68,6 +73,10 @@ class DataSource(db.Model, tb.BeliefSourceDBMixin): ) user = db.relationship("User", backref=db.backref("data_source", lazy=True)) + attributes = db.Column(MutableDict.as_mutable(db.JSON), nullable=False, default={}) + + attributes_hash = db.Column(db.LargeBinary(length=256)) + # The model and version of a script source model = db.Column(db.String(80), nullable=True) version = db.Column( @@ -75,11 +84,19 @@ class DataSource(db.Model, tb.BeliefSourceDBMixin): nullable=True, ) + sensors = db.relationship( + "Sensor", + secondary="timed_belief", + backref=db.backref("data_sources", lazy="select"), + viewonly=True, + ) + def __init__( self, name: str | None = None, type: str | None = None, user: User | None = None, + attributes: dict | None = None, **kwargs, ): if user is not None: @@ -89,6 +106,13 @@ def __init__( elif user is None and type == "user": raise TypeError("A data source cannot have type 'user' but no user set.") self.type = type + + if attributes is not None: + self.attributes = attributes + self.attributes_hash = hashlib.sha256( + json.dumps(attributes).encode("utf-8") + ).digest() + tb.BeliefSourceDBMixin.__init__(self, name=name) db.Model.__init__(self, **kwargs) @@ -144,3 +168,17 @@ def to_dict(self) -> dict: type=self.type if self.type in ("forecaster", "scheduler") else "other", description=self.description, ) + + @staticmethod + def hash_attributes(attributes: dict) -> str: + return hashlib.sha256(json.dumps(attributes).encode("utf-8")).digest() + + def get_attribute(self, attribute: str, default: Any = None) -> Any: + """Looks for the attribute in the DataSource's attributes column.""" + return self.attributes.get(attribute, default) + + def has_attribute(self, attribute: str) -> bool: + return attribute in self.attributes + + def set_attribute(self, attribute: str, value): + self.attributes[attribute] = value diff --git 
a/flexmeasures/data/models/generic_assets.py b/flexmeasures/data/models/generic_assets.py index facdf2c7b..fe7e792a8 100644 --- a/flexmeasures/data/models/generic_assets.py +++ b/flexmeasures/data/models/generic_assets.py @@ -20,13 +20,11 @@ from flexmeasures.data.models.parsing_utils import parse_source_arg from flexmeasures.data.models.user import User from flexmeasures.data.queries.annotations import query_asset_annotations +from flexmeasures.data.services.timerange import get_timerange from flexmeasures.auth.policy import AuthModelMixin, EVERY_LOGGED_IN_USER from flexmeasures.utils import geo_utils from flexmeasures.utils.coding_utils import flatten_unique -from flexmeasures.utils.time_utils import ( - determine_minimum_resampling_resolution, - server_now, -) +from flexmeasures.utils.time_utils import determine_minimum_resampling_resolution class GenericAssetType(db.Model): @@ -525,26 +523,9 @@ def get_timerange(cls, sensors: List["Sensor"]) -> Dict[str, datetime]: # noqa 'end': datetime.datetime(2020, 12, 3, 14, 30, tzinfo=pytz.utc) } """ - from flexmeasures.data.models.time_series import TimedBelief - sensor_ids = [s.id for s in flatten_unique(sensors)] - least_recent_query = ( - TimedBelief.query.filter(TimedBelief.sensor_id.in_(sensor_ids)) - .order_by(TimedBelief.event_start.asc()) - .limit(1) - ) - most_recent_query = ( - TimedBelief.query.filter(TimedBelief.sensor_id.in_(sensor_ids)) - .order_by(TimedBelief.event_start.desc()) - .limit(1) - ) - results = least_recent_query.union_all(most_recent_query).all() - if not results: - # return now in case there is no data for any of the sensors - now = server_now() - return dict(start=now, end=now) - least_recent, most_recent = results - return dict(start=least_recent.event_start, end=most_recent.event_end) + start, end = get_timerange(sensor_ids) + return dict(start=start, end=end) def create_generic_asset(generic_asset_type: str, **kwargs) -> GenericAsset: diff --git 
a/flexmeasures/data/models/planning/linear_optimization.py b/flexmeasures/data/models/planning/linear_optimization.py index 32d8b7456..f0d79f866 100644 --- a/flexmeasures/data/models/planning/linear_optimization.py +++ b/flexmeasures/data/models/planning/linear_optimization.py @@ -21,6 +21,7 @@ from pyomo.opt import SolverFactory, SolverResults from flexmeasures.data.models.planning.utils import initialize_series +from flexmeasures.utils.calculations import apply_stock_changes_and_losses infinity = float("inf") @@ -31,6 +32,7 @@ def device_scheduler( # noqa C901 commitment_quantities: List[pd.Series], commitment_downwards_deviation_price: Union[List[pd.Series], List[float]], commitment_upwards_deviation_price: Union[List[pd.Series], List[float]], + initial_stock: float = 0, ) -> Tuple[List[pd.Series], float, SolverResults]: """This generic device scheduler is able to handle an EMS with multiple devices, with various types of constraints on the EMS level and on the device level, @@ -43,6 +45,7 @@ def device_scheduler( # noqa C901 max: maximum stock assuming an initial stock of zero (e.g. in MWh or boxes) min: minimum stock assuming an initial stock of zero equal: exact amount of stock (we do this by clamping min and max) + efficiency: amount of stock left at the next datetime (the rest is lost) derivative max: maximum flow (e.g. in MW or boxes/h) derivative min: minimum flow derivative equals: exact amount of flow (we do this by clamping derivative min and derivative max) @@ -71,7 +74,8 @@ def device_scheduler( # noqa C901 # Check if commitments have the same time window and resolution as the constraints start = device_constraints[0].index.to_pydatetime()[0] - resolution = pd.to_timedelta(device_constraints[0].index.freq) + # Workaround for https://github.com/pandas-dev/pandas/issues/53643. 
Was: resolution = pd.to_timedelta(device_constraints[0].index.freq) + resolution = pd.to_timedelta(device_constraints[0].index.freq).to_pytimedelta() end = device_constraints[0].index.to_pydatetime()[-1] + resolution if len(commitment_quantities) != 0: start_c = commitment_quantities[0].index.to_pydatetime()[0] @@ -171,6 +175,16 @@ def ems_derivative_min_select(m, j): else: return v + def device_efficiency(m, d, j): + """Assume perfect efficiency if no efficiency information is available.""" + try: + eff = device_constraints[d]["efficiency"].iloc[j] + except KeyError: + return 1 + if np.isnan(eff): + return 1 + return eff + def device_derivative_down_efficiency(m, d, j): """Assume perfect efficiency if no efficiency information is available.""" try: @@ -206,6 +220,7 @@ def device_derivative_up_efficiency(m, d, j): ) model.ems_derivative_max = Param(model.j, initialize=ems_derivative_max_select) model.ems_derivative_min = Param(model.j, initialize=ems_derivative_min_select) + model.device_efficiency = Param(model.d, model.j, initialize=device_efficiency) model.device_derivative_down_efficiency = Param( model.d, model.j, initialize=device_derivative_down_efficiency ) @@ -228,14 +243,24 @@ def device_derivative_up_efficiency(m, d, j): # Add constraints as a tuple of (lower bound, value, upper bound) def device_bounds(m, d, j): - """Apply efficiencies to conversion from flow to stock change and vice versa.""" - return ( - m.device_min[d, j], - sum( + """Apply conversion efficiencies to conversion from flow to stock change and vice versa, + and apply storage efficiencies to stock levels from one datetime to the next.""" + stock_changes = [ + ( m.device_power_down[d, k] / m.device_derivative_down_efficiency[d, k] + m.device_power_up[d, k] * m.device_derivative_up_efficiency[d, k] - for k in range(0, j + 1) - ), + ) + for k in range(0, j + 1) + ] + efficiencies = [m.device_efficiency[d, k] for k in range(0, j + 1)] + return ( + m.device_min[d, j], + [ + stock - 
initial_stock + for stock in apply_stock_changes_and_losses( + initial_stock, stock_changes, efficiencies + ) + ][-1], m.device_max[d, j], ) diff --git a/flexmeasures/data/models/planning/storage.py b/flexmeasures/data/models/planning/storage.py index 74191180a..d3bb52fb5 100644 --- a/flexmeasures/data/models/planning/storage.py +++ b/flexmeasures/data/models/planning/storage.py @@ -3,7 +3,6 @@ import re import copy from datetime import datetime, timedelta -from typing import List, Dict import pandas as pd import numpy as np @@ -34,6 +33,7 @@ class StorageScheduler(Scheduler): "equals", "max", "min", + "efficiency", "derivative equals", "derivative max", "derivative min", @@ -73,6 +73,7 @@ def compute(self, skip_validation: bool = False) -> pd.Series | None: soc_minima = self.flex_model.get("soc_minima") soc_maxima = self.flex_model.get("soc_maxima") roundtrip_efficiency = self.flex_model.get("roundtrip_efficiency") + storage_efficiency = self.flex_model.get("storage_efficiency") prefer_charging_sooner = self.flex_model.get("prefer_charging_sooner", True) consumption_price_sensor = self.flex_context.get("consumption_price_sensor") @@ -126,7 +127,7 @@ def compute(self, skip_validation: bool = False) -> pd.Series | None: down_deviation_prices.loc[start : end - resolution]["event_value"] ] - # Set up device _constraints: only one scheduled flexible device for this EMS (at index 0), plus the forecasted inflexible devices (at indices 1 to n). + # Set up device constraints: only one scheduled flexible device for this EMS (at index 0), plus the forecasted inflexible devices (at indices 1 to n). 
device_constraints = [ initialize_df(StorageScheduler.COLUMNS, start, end, resolution) for i in range(1 + len(inflexible_device_sensors)) @@ -170,6 +171,9 @@ def compute(self, skip_validation: bool = False) -> pd.Series | None: ) device_constraints[0]["derivative up efficiency"] = roundtrip_efficiency**0.5 + # Apply storage efficiency (accounts for losses over time) + device_constraints[0]["efficiency"] = storage_efficiency + # check that storage constraints are fulfilled if not skip_validation: constraint_violations = validate_storage_constraints( @@ -182,7 +186,11 @@ def compute(self, skip_validation: bool = False) -> pd.Series | None: if len(constraint_violations) > 0: # TODO: include hints from constraint_violations into the error message - raise ValueError("The input data yields an infeasible problem.") + message = create_constraint_violations_message(constraint_violations) + raise ValueError( + "The input data yields an infeasible problem. Constraint validation has found the following issues:\n" + + message + ) # Set up EMS constraints ems_constraints = initialize_df( @@ -199,6 +207,7 @@ def compute(self, skip_validation: bool = False) -> pd.Series | None: commitment_quantities, commitment_downwards_deviation_price, commitment_upwards_deviation_price, + initial_stock=soc_at_start * (timedelta(hours=1) / resolution), ) if scheduler_results.solver.termination_condition == "infeasible": # Fallback policy if the problem was unsolvable @@ -268,7 +277,19 @@ def deserialize_flex_config(self): elif self.sensor.unit in ("MW", "kW"): self.flex_model["soc-unit"] = self.sensor.unit + "h" + # Check for storage efficiency + # todo: simplify to: `if self.flex_model.get("storage-efficiency") is None:` + if ( + "storage-efficiency" not in self.flex_model + or self.flex_model["storage-efficiency"] is None + ): + # Get default from sensor, or use 100% otherwise + self.flex_model["storage-efficiency"] = self.sensor.get_attribute( + "storage_efficiency", 1 + ) + # Check for 
round-trip efficiency + # todo: simplify to: `if self.flex_model.get("roundtrip-efficiency") is None:` if ( "roundtrip-efficiency" not in self.flex_model or self.flex_model["roundtrip-efficiency"] is None @@ -365,8 +386,25 @@ def ensure_soc_min_max(self): ) +def create_constraint_violations_message(constraint_violations: list) -> str: + """Create a human-readable message with the constraint_violations. + + :param constraint_violations: list with the constraint violations + :return: human-readable message + """ + message = "" + + for c in constraint_violations: + message += f"t={c['dt']} | {c['violation']}\n" + + if len(message) > 1: + message = message[:-1] + + return message + + def build_device_soc_values( - soc_values: List[Dict[str, datetime | float]] | pd.Series, + soc_values: list[dict[str, datetime | float]] | pd.Series, soc_at_start: float, start_of_schedule: datetime, end_of_schedule: datetime, @@ -435,9 +473,9 @@ def add_storage_constraints( end: datetime, resolution: timedelta, soc_at_start: float, - soc_targets: List[Dict[str, datetime | float]] | pd.Series | None, - soc_maxima: List[Dict[str, datetime | float]] | pd.Series | None, - soc_minima: List[Dict[str, datetime | float]] | pd.Series | None, + soc_targets: list[dict[str, datetime | float]] | pd.Series | None, + soc_maxima: list[dict[str, datetime | float]] | pd.Series | None, + soc_minima: list[dict[str, datetime | float]] | pd.Series | None, soc_max: float, soc_min: float, ) -> pd.DataFrame: @@ -560,25 +598,33 @@ def validate_storage_constraints( # 1) min >= soc_min soc_min = (soc_min - soc_at_start) * timedelta(hours=1) / resolution _constraints["soc_min(t)"] = soc_min - constraint_violations += validate_constraint(_constraints, "soc_min(t) <= min(t)") + constraint_violations += validate_constraint( + _constraints, "soc_min(t)", "<=", "min(t)" + ) # 2) max <= soc_max soc_max = (soc_max - soc_at_start) * timedelta(hours=1) / resolution _constraints["soc_max(t)"] = soc_max - constraint_violations 
+= validate_constraint(_constraints, "max(t) <= soc_max(t)") + constraint_violations += validate_constraint( + _constraints, "max(t)", "<=", "soc_max(t)" + ) ######################################## # B. Validation in the same time frame # ######################################## # 1) min <= max - constraint_violations += validate_constraint(_constraints, "min(t) <= max(t)") + constraint_violations += validate_constraint(_constraints, "min(t)", "<=", "max(t)") # 2) min <= equals - constraint_violations += validate_constraint(_constraints, "min(t) <= equals(t)") + constraint_violations += validate_constraint( + _constraints, "min(t)", "<=", "equals(t)" + ) # 3) equals <= max - constraint_violations += validate_constraint(_constraints, "equals(t) <= max(t)") + constraint_violations += validate_constraint( + _constraints, "equals(t)", "<=", "max(t)" + ) ########################################## # C. Validation in different time frames # @@ -591,32 +637,38 @@ def validate_storage_constraints( # 1) equals(t) - equals(t-1) <= derivative_max(t) constraint_violations += validate_constraint( - _constraints, "equals(t) - equals(t-1) <= derivative_max(t) * factor_w_wh(t)" + _constraints, + "equals(t) - equals(t-1)", + "<=", + "derivative_max(t) * factor_w_wh(t)", ) # 2) derivative_min(t) <= equals(t) - equals(t-1) constraint_violations += validate_constraint( - _constraints, "derivative_min(t) * factor_w_wh(t) <= equals(t) - equals(t-1)" + _constraints, + "derivative_min(t) * factor_w_wh(t)", + "<=", + "equals(t) - equals(t-1)", ) # 3) min(t) - max(t-1) <= derivative_max(t) constraint_violations += validate_constraint( - _constraints, "min(t) - max(t-1) <= derivative_max(t) * factor_w_wh(t)" + _constraints, "min(t) - max(t-1)", "<=", "derivative_max(t) * factor_w_wh(t)" ) # 4) max(t) - min(t-1) >= derivative_min(t) constraint_violations += validate_constraint( - _constraints, "derivative_min(t) * factor_w_wh(t) <= max(t) - min(t-1)" + _constraints, "derivative_min(t) * 
factor_w_wh(t)", "<=", "max(t) - min(t-1)" ) # 5) equals(t) - max(t-1) <= derivative_max(t) constraint_violations += validate_constraint( - _constraints, "equals(t) - max(t-1) <= derivative_max(t) * factor_w_wh(t)" + _constraints, "equals(t) - max(t-1)", "<=", "derivative_max(t) * factor_w_wh(t)" ) # 6) derivative_min(t) <= equals(t) - min(t-1) constraint_violations += validate_constraint( - _constraints, "derivative_min(t) * factor_w_wh(t) <= equals(t) - min(t-1)" + _constraints, "derivative_min(t) * factor_w_wh(t)", "<=", "equals(t) - min(t-1)" ) return constraint_violations @@ -640,33 +692,85 @@ def get_pattern_match_word(word: str) -> str: return regex + re.escape(word) + regex +def sanitize_expression(expression: str, columns: list) -> tuple[str, list]: + """Wrap column in commas to accept arbitrary column names (e.g. with spaces). + + :param expression: expression to sanitize + :param columns: list with the name of the columns of the input data for the expression. + :return: sanitized expression and columns (variables) used in the expression + """ + + _expression = copy.copy(expression) + columns_involved = [] + + for column in columns: + + if re.search(get_pattern_match_word(column), _expression): + columns_involved.append(column) + + _expression = re.sub(get_pattern_match_word(column), f"`{column}`", _expression) + + return _expression, columns_involved + + def validate_constraint( - constraints_df: pd.DataFrame, constraint_expression: str + constraints_df: pd.DataFrame, + lhs_expression: str, + inequality: str, + rhs_expression: str, + round_to_decimals: int | None = 6, ) -> list[dict]: """Validate the feasibility of a given set of constraints. - :param constraints_df: DataFrame with the constraints - :param constraint_expression: inequality expression following pd.eval format. - No need to use the syntax `column` to reference - column, just use the column name. - :return: List of constraint violations, specifying their time, constraint and violation. 
+ :param constraints_df: DataFrame with the constraints + :param lhs_expression: left-hand side of the inequality expression following pd.eval format. + No need to use the syntax `column` to reference + column, just use the column name. + :param inequality: inequality operator, one of ('<=', '<', '>=', '>', '==', '!='). + :param rhs_expression: right-hand side of the inequality expression following pd.eval format. + No need to use the syntax `column` to reference + column, just use the column name. + :param round_to_decimals: Number of decimals to round off to before validating constraints. + :return: List of constraint violations, specifying their time, constraint and violation. """ - columns_involved = [] + constraint_expression = f"{lhs_expression} {inequality} {rhs_expression}" - eval_expression = copy.copy(constraint_expression) + constraints_df_columns = list(constraints_df.columns) - for column in constraints_df.columns: - if re.search(get_pattern_match_word(column), eval_expression): - columns_involved.append(column) + lhs_expression, columns_lhs = sanitize_expression( + lhs_expression, constraints_df_columns + ) + rhs_expression, columns_rhs = sanitize_expression( + rhs_expression, constraints_df_columns + ) - eval_expression = re.sub( - get_pattern_match_word(column), f"`{column}`", eval_expression - ) + columns_involved = columns_lhs + columns_rhs + + lhs = constraints_df.fillna(0).eval(lhs_expression).round(round_to_decimals) + rhs = constraints_df.fillna(0).eval(rhs_expression).round(round_to_decimals) + + condition = None + + inequality = inequality.strip() + + if inequality == "<=": + condition = lhs <= rhs + elif inequality == "<": + condition = lhs < rhs + elif inequality == ">=": + condition = lhs >= rhs + elif inequality == ">": + condition = lhs > rhs + elif inequality == "==": + condition = lhs == rhs + elif inequality == "!=": + condition = lhs != rhs + else: + raise ValueError(f"Inequality `{inequality} not supported.") time_condition_fails = 
constraints_df.index[ - ~constraints_df.fillna(0).eval(eval_expression) - & ~constraints_df[columns_involved].isna().any(axis=1) + ~condition & ~constraints_df[columns_involved].isna().any(axis=1) ] constraint_violations = [] @@ -677,7 +781,7 @@ def validate_constraint( for column in constraints_df.columns: value_replaced = re.sub( get_pattern_match_word(column), - f"{column} [{constraints_df.loc[dt, column]}]", + f"{column} [{constraints_df.loc[dt, column]}] ", value_replaced, ) @@ -712,7 +816,7 @@ def prepend_serie(serie: pd.Series, value) -> pd.Series: #################### @deprecated(build_device_soc_values, "0.14") def build_device_soc_targets( - targets: List[Dict[str, datetime | float]] | pd.Series, + targets: list[dict[str, datetime | float]] | pd.Series, soc_at_start: float, start_of_schedule: datetime, end_of_schedule: datetime, diff --git a/flexmeasures/data/models/planning/tests/test_solver.py b/flexmeasures/data/models/planning/tests/test_solver.py index 9baf56cb5..ec16608c5 100644 --- a/flexmeasures/data/models/planning/tests/test_solver.py +++ b/flexmeasures/data/models/planning/tests/test_solver.py @@ -13,19 +13,51 @@ validate_storage_constraints, ) from flexmeasures.data.models.planning.utils import initialize_series, initialize_df -from flexmeasures.utils.calculations import integrate_time_series +from flexmeasures.utils.calculations import ( + apply_stock_changes_and_losses, + integrate_time_series, +) TOLERANCE = 0.00001 +@pytest.mark.parametrize( + "initial_stock, stock_deltas, expected_stocks, storage_efficiency", + [ + ( + 1000, + [100, -100, -100, 100], + [1000, 1089, 979.11, 870.3189, 960.615711], + 0.99, + ), + ( + 2.5, + [-0.5, -0.5, -0.5, -0.5], + [2.5, 1.8, 1.17, 0.603, 0.0927], + 0.9, + ), + ], +) +def test_storage_loss_function( + initial_stock, stock_deltas, expected_stocks, storage_efficiency +): + stocks = apply_stock_changes_and_losses( + initial_stock, + stock_deltas, + storage_efficiency=storage_efficiency, + how="left", + 
decimal_precision=6, + ) + print(stocks) + assert all(a == b for a, b in zip(stocks, expected_stocks)) + + @pytest.mark.parametrize("use_inflexible_device", [False, True]) def test_battery_solver_day_1( add_battery_assets, add_inflexible_device_forecasts, use_inflexible_device ): - epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none() - battery = Sensor.query.filter(Sensor.name == "Test battery").one_or_none() - assert battery.get_attribute("market_id") == epex_da.id + epex_da, battery = get_sensors_from_db() tz = pytz.timezone("Europe/Amsterdam") start = tz.localize(datetime(2015, 1, 1)) end = tz.localize(datetime(2015, 1, 2)) @@ -62,14 +94,18 @@ def test_battery_solver_day_1( @pytest.mark.parametrize( - "roundtrip_efficiency", + "roundtrip_efficiency, storage_efficiency", [ - 1, - 0.99, - 0.01, + (1, 1), + (1, 0.999), + (1, 0.5), + (0.99, 1), + (0.01, 1), ], ) -def test_battery_solver_day_2(add_battery_assets, roundtrip_efficiency: float): +def test_battery_solver_day_2( + add_battery_assets, roundtrip_efficiency: float, storage_efficiency: float +): """Check battery scheduling results for day 2, which is set up with 8 expensive, then 8 cheap, then again 8 expensive hours. 
If efficiency losses aren't too bad, we expect the scheduler to: @@ -80,9 +116,7 @@ def test_battery_solver_day_2(add_battery_assets, roundtrip_efficiency: float): and so we expect the scheduler to only: - completely discharge within the last 8 hours """ - epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none() - battery = Sensor.query.filter(Sensor.name == "Test battery").one_or_none() - assert battery.get_attribute("market_id") == epex_da.id + _epex_da, battery = get_sensors_from_db() tz = pytz.timezone("Europe/Amsterdam") start = tz.localize(datetime(2015, 1, 2)) end = tz.localize(datetime(2015, 1, 3)) @@ -100,6 +134,7 @@ def test_battery_solver_day_2(add_battery_assets, roundtrip_efficiency: float): "soc-min": soc_min, "soc-max": soc_max, "roundtrip-efficiency": roundtrip_efficiency, + "storage-efficiency": storage_efficiency, }, ) schedule = scheduler.compute() @@ -108,6 +143,7 @@ def test_battery_solver_day_2(add_battery_assets, roundtrip_efficiency: float): soc_at_start, up_efficiency=roundtrip_efficiency**0.5, down_efficiency=roundtrip_efficiency**0.5, + storage_efficiency=storage_efficiency, decimal_precision=6, ) @@ -126,22 +162,30 @@ def test_battery_solver_day_2(add_battery_assets, roundtrip_efficiency: float): soc_min, battery.get_attribute("min_soc_in_mwh") ) # Battery sold out at the end of its planning horizon - # As long as the roundtrip efficiency isn't too bad (I haven't computed the actual switch point) - if roundtrip_efficiency > 0.9: + # As long as the efficiencies aren't too bad (I haven't computed the actual switch points) + if roundtrip_efficiency > 0.9 and storage_efficiency > 0.9: assert soc_schedule.loc[start + timedelta(hours=8)] == max( soc_min, battery.get_attribute("min_soc_in_mwh") ) # Sell what you begin with assert soc_schedule.loc[start + timedelta(hours=16)] == min( soc_max, battery.get_attribute("max_soc_in_mwh") ) # Buy what you can to sell later - else: - # If the roundtrip efficiency is poor, best to stand idle 
+ elif storage_efficiency > 0.9: + # If only the roundtrip efficiency is poor, best to stand idle (keep a high SoC as long as possible) assert soc_schedule.loc[start + timedelta(hours=8)] == battery.get_attribute( "soc_in_mwh" ) assert soc_schedule.loc[start + timedelta(hours=16)] == battery.get_attribute( "soc_in_mwh" ) + else: + # If the storage efficiency is poor, regardless of whether the roundtrip efficiency is poor, best to sell asap + assert soc_schedule.loc[start + timedelta(hours=8)] == max( + soc_min, battery.get_attribute("min_soc_in_mwh") + ) + assert soc_schedule.loc[start + timedelta(hours=16)] == max( + soc_min, battery.get_attribute("min_soc_in_mwh") + ) @pytest.mark.parametrize( @@ -188,6 +232,9 @@ def test_charging_station_solver_day_2(target_soc, charging_station_name): "roundtrip_efficiency": charging_station.get_attribute( "roundtrip_efficiency", 1 ), + "storage_efficiency": charging_station.get_attribute( + "storage_efficiency", 1 + ), "soc_targets": soc_targets, }, ) @@ -261,6 +308,9 @@ def test_fallback_to_unsolvable_problem(target_soc, charging_station_name): "roundtrip_efficiency": charging_station.get_attribute( "roundtrip_efficiency", 1 ), + "storage_efficiency": charging_station.get_attribute( + "storage_efficiency", 1 + ), "soc_targets": soc_targets, }, ) @@ -351,6 +401,7 @@ def test_building_solver_day_2( "soc_min": soc_min, "soc_max": soc_max, "roundtrip_efficiency": battery.get_attribute("roundtrip_efficiency", 1), + "storage_efficiency": battery.get_attribute("storage_efficiency", 1), }, flex_context={ "inflexible_device_sensors": inflexible_devices.values(), @@ -435,9 +486,7 @@ def test_soc_bounds_timeseries(add_battery_assets): """ # get the sensors from the database - epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none() - battery = Sensor.query.filter(Sensor.name == "Test battery").one_or_none() - assert battery.get_attribute("market_id") == epex_da.id + epex_da, battery = get_sensors_from_db() # time parameters 
tz = pytz.timezone("Europe/Amsterdam") @@ -511,15 +560,15 @@ def compute_schedule(flex_model): # test for soc_minima # check that the local minimum constraint is respected - assert soc_schedule2.loc[datetime(2015, 1, 2, 7)] >= 3.5 + assert soc_schedule2.loc["2015-01-02T08:00:00+01:00"] >= 3.5 # test for soc_maxima # check that the local maximum constraint is respected - assert soc_schedule2.loc[datetime(2015, 1, 2, 14)] <= 1.0 + assert soc_schedule2.loc["2015-01-02T15:00:00+01:00"] <= 1.0 # test for soc_targets # check that the SOC target (at 19 pm, local time) is met - assert soc_schedule2.loc[datetime(2015, 1, 2, 18)] == 2.0 + assert soc_schedule2.loc["2015-01-02T19:00:00+01:00"] == 2.0 @pytest.mark.parametrize( @@ -585,6 +634,26 @@ def test_add_storage_constraints( @pytest.mark.parametrize( "value_min1, value_equals1, value_max1, value_min2, value_equals2, value_max2, expected_constraint_type_violations", [ + (1, np.nan, 9, 1, np.nan, 9, []), # base case + (1, np.nan, 10, 1, np.nan, 10, []), # exact equality + ( + 1, + np.nan, + 10 + 0.5e-6, + 1, + np.nan, + 10, + [], + ), # equality considering the precision (6 decimal figures) + ( + 1, + np.nan, + 10 + 1e-5, + 1, + np.nan, + 10, + ["max(t) <= soc_max(t)"], + ), # difference of 0.5e-5 > 1e-6 (1, np.nan, 9, 2, np.nan, 20, ["max(t) <= soc_max(t)"]), (-1, np.nan, 9, 1, np.nan, 9, ["soc_min(t) <= min(t)"]), (1, 10, 9, 1, np.nan, 9, ["equals(t) <= max(t)"]), @@ -717,3 +786,61 @@ def test_validate_constraints( ) assert set(expected_constraint_type_violations) == constraint_type_violations_output + + +def test_infeasible_problem_error(add_battery_assets): + """Try to create a schedule with infeasible constraints. 
soc-max is 4.5 and soc-target is 8.0""" + + # get the sensors from the database + _epex_da, battery = get_sensors_from_db() + + # time parameters + tz = pytz.timezone("Europe/Amsterdam") + start = tz.localize(datetime(2015, 1, 2)) + end = tz.localize(datetime(2015, 1, 3)) + resolution = timedelta(hours=1) + + def compute_schedule(flex_model): + scheduler = StorageScheduler( + battery, + start, + end, + resolution, + flex_model=flex_model, + ) + schedule = scheduler.compute() + + soc_schedule = integrate_time_series( + schedule, + soc_at_start, + decimal_precision=1, + ) + + return soc_schedule + + # soc parameters + soc_at_start = battery.get_attribute("soc_in_mwh") + infeasible_max_soc_targets = [ + {"datetime": "2015-01-02T16:00:00+01:00", "value": 8.0} + ] + + flex_model = { + "soc-at-start": soc_at_start, + "soc-min": 0.5, + "soc-max": 4.5, + "soc-targets": infeasible_max_soc_targets, + } + + with pytest.raises( + ValueError, match="The input data yields an infeasible problem." + ): + compute_schedule(flex_model) + + +def get_sensors_from_db(): + # get the sensors from the database + epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none() + battery = Sensor.query.filter(Sensor.name == "Test battery").one_or_none() + assert battery.get_attribute("market_id") == epex_da.id + + return epex_da, battery diff --git a/flexmeasures/data/models/reporting/aggregator.py b/flexmeasures/data/models/reporting/aggregator.py new file mode 100644 index 000000000..e74596ae0 --- /dev/null +++ b/flexmeasures/data/models/reporting/aggregator.py @@ -0,0 +1,77 @@ +from __future__ import annotations + +from datetime import datetime, timedelta + +import timely_beliefs as tb +import pandas as pd + +from flexmeasures.data.models.reporting import Reporter +from flexmeasures.data.schemas.reporting.aggregation import AggregatorSchema + +from flexmeasures.utils.time_utils import server_now + + +class AggregatorReporter(Reporter): + """This reporter applies an aggregation 
function to multiple sensors""" + + __version__ = "1" + __author__ = "Seita" + schema = AggregatorSchema() + weights: dict + method: str + + def deserialize_config(self): + # call Reporter deserialize_config + super().deserialize_config() + + # extract AggregatorReporter specific fields + self.method = self.reporter_config.get("method") + self.weights = self.reporter_config.get("weights", dict()) + + def _compute( + self, + start: datetime, + end: datetime, + input_resolution: timedelta | None = None, + belief_time: datetime | None = None, + ) -> tb.BeliefsDataFrame: + """ + This method merges all the BeliefDataFrames into a single one, dropping + all indexes but event_start, and applies an aggregation function over the + columns. + """ + + dataframes = [] + + if belief_time is None: + belief_time = server_now() + + for belief_search_config in self.beliefs_search_configs: + # if alias is not in belief_search_config, using the Sensor id instead + column_name = belief_search_config.get( + "alias", f"sensor_{belief_search_config['sensor'].id}" + ) + data = self.data[column_name].droplevel([1, 2, 3]) + + # apply weight + if column_name in self.weights: + data *= self.weights[column_name] + + dataframes.append(data) + + output_df = pd.concat(dataframes, axis=1) + + # apply aggregation method + output_df = output_df.aggregate(self.method, axis=1) + + # convert BeliefsSeries into a BeliefsDataFrame + output_df = output_df.to_frame("event_value") + output_df["belief_time"] = belief_time + output_df["cumulative_probability"] = 0.5 + output_df["source"] = self.data_source + + output_df = output_df.set_index( + ["belief_time", "source", "cumulative_probability"], append=True + ) + + return output_df diff --git a/flexmeasures/data/models/reporting/pandas_reporter.py b/flexmeasures/data/models/reporting/pandas_reporter.py index 3b98bb2f6..1e1e98179 100644 --- a/flexmeasures/data/models/reporting/pandas_reporter.py +++ b/flexmeasures/data/models/reporting/pandas_reporter.py @@ 
-10,7 +10,6 @@ from flexmeasures.data.schemas.reporting.pandas_reporter import ( PandasReporterConfigSchema, ) -from flexmeasures.data.models.time_series import TimedBelief from flexmeasures.utils.time_utils import server_now @@ -18,7 +17,7 @@ class PandasReporter(Reporter): """This reporter applies a series of pandas methods on""" __version__ = "1" - __author__ = None + __author__ = "Seita" schema = PandasReporterConfigSchema() transformations: list[dict[str, Any]] = None final_df_output: str = None @@ -43,6 +42,9 @@ def _compute( defined in `final_df_output` field of the report_config. """ + if belief_time is None: + belief_time = server_now() + # apply pandas transformations to the dataframes in `self.data` self._apply_transformations() @@ -51,9 +53,9 @@ def _compute( if isinstance(final_output, tb.BeliefsDataFrame): # filing the missing indexes with default values: - # belief_time=server_now(), cummulative_probability=0.5, source=data_source + # belief_time=belief_time, cummulative_probability=0.5, source=data_source if "belief_time" not in final_output.index.names: - final_output["belief_time"] = [server_now()] * len(final_output) + final_output["belief_time"] = [belief_time] * len(final_output) final_output = final_output.set_index("belief_time", append=True) if "cumulative_probability" not in final_output.index.names: @@ -71,18 +73,13 @@ def _compute( ) elif isinstance(final_output, tb.BeliefsSeries): - - timed_beliefs = [ - TimedBelief( - sensor=final_output.sensor, - source=self.data_source, - belief_time=server_now(), - event_start=event_start, - event_value=event_value, - ) - for event_start, event_value in final_output.iteritems() - ] - final_output = tb.BeliefsDataFrame(timed_beliefs) + final_output = final_output.to_frame("event_value") + final_output["belief_time"] = belief_time + final_output["cumulative_probability"] = 0.5 + final_output["source"] = self.data_source + final_output = final_output.set_index( + ["belief_time", "source", 
"cumulative_probability"], append=True + ) return final_output diff --git a/flexmeasures/data/models/reporting/tests/conftest.py b/flexmeasures/data/models/reporting/tests/conftest.py index 347196842..a75a1a465 100644 --- a/flexmeasures/data/models/reporting/tests/conftest.py +++ b/flexmeasures/data/models/reporting/tests/conftest.py @@ -63,6 +63,20 @@ def setup_dummy_data(db, app): ) ) + # add simple data for testing the AggregatorReporter: + # 24 hourly events with value 1 for sensor1 and value -1 for sensor2 + for sensor, source, value in zip([sensor1, sensor2], [source1, source2], [1, -1]): + for t in range(24): + beliefs.append( + TimedBelief( + event_start=datetime(2023, 5, 10, tzinfo=utc) + timedelta(hours=t), + belief_horizon=timedelta(hours=24), + event_value=value, + sensor=sensor, + source=source, + ) + ) + db.session.add_all(beliefs) db.session.commit() diff --git a/flexmeasures/data/models/reporting/tests/test_aggregator.py b/flexmeasures/data/models/reporting/tests/test_aggregator.py new file mode 100644 index 000000000..8cd287fa4 --- /dev/null +++ b/flexmeasures/data/models/reporting/tests/test_aggregator.py @@ -0,0 +1,60 @@ +import pytest + +from flexmeasures.data.models.reporting.aggregator import AggregatorReporter + +from datetime import datetime +from pytz import utc + + +@pytest.mark.parametrize( + "aggregation_method, expected_value", + [ + ("sum", 0), + ("mean", 0), + ("var", 2), + ("std", 2**0.5), + ("max", 1), + ("min", -1), + ("prod", -1), + ("median", 0), + ], +) +def test_aggregator(setup_dummy_data, aggregation_method, expected_value): + """ + This test computes the aggregation of two sensors containing 24 entries + with value 1 and -1, respectively, for sensors 1 and 2. 
+ + Test cases: + 1) sum: 0 = 1 + (-1) + 2) mean: 0 = ((1) + (-1))/2 + 3) var: 2 = (1)^2 + (-1)^2 + 4) std: sqrt(2) = sqrt((1)^2 + (-1)^2) + 5) max: 1 = max(1, -1) + 6) min: -1 = min(1, -1) + 7) prod: -1 = (1) * (-1) + 8) median: even number of elements, mean of the most central elements, 0 = ((1) + (-1))/2 + """ + s1, s2, reporter_sensor = setup_dummy_data + + reporter_config_raw = dict( + beliefs_search_configs=[ + dict(sensor=s1.id, source=1), + dict(sensor=s2.id, source=2), + ], + method=aggregation_method, + ) + + agg_reporter = AggregatorReporter( + reporter_sensor, reporter_config_raw=reporter_config_raw + ) + + result = agg_reporter.compute( + start=datetime(2023, 5, 10, tzinfo=utc), + end=datetime(2023, 5, 11, tzinfo=utc), + ) + + # check that we got a result for 24 hours + assert len(result) == 24 + + # check that the value is equal to expected_value + assert (result == expected_value).all().event_value diff --git a/flexmeasures/data/models/time_series.py b/flexmeasures/data/models/time_series.py index 18b9e115b..70f1c1f08 100644 --- a/flexmeasures/data/models/time_series.py +++ b/flexmeasures/data/models/time_series.py @@ -10,6 +10,7 @@ from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.orm import Query, Session from sqlalchemy.schema import UniqueConstraint +from sqlalchemy import inspect import timely_beliefs as tb from timely_beliefs.beliefs.probabilistic_utils import get_median_belief import timely_beliefs.utils as tb_utils @@ -18,6 +19,7 @@ from flexmeasures.data import db from flexmeasures.data.models.parsing_utils import parse_source_arg from flexmeasures.data.services.annotations import prepare_annotations_for_chart +from flexmeasures.data.services.timerange import get_timerange from flexmeasures.data.queries.utils import ( create_beliefs_query, get_belief_timing_criteria, @@ -42,7 +44,6 @@ from flexmeasures.data.models.generic_assets import GenericAsset from flexmeasures.data.models.validation_utils import check_required_attributes 
from flexmeasures.data.queries.sensors import query_sensors_by_proximity -from flexmeasures.utils.time_utils import server_now from flexmeasures.utils.geo_utils import parse_lat_lng @@ -479,23 +480,8 @@ def timerange(self) -> dict[str, datetime_type]: 'end': datetime.datetime(2020, 12, 3, 14, 30, tzinfo=pytz.utc) } """ - least_recent_query = ( - TimedBelief.query.filter(TimedBelief.sensor == self) - .order_by(TimedBelief.event_start.asc()) - .limit(1) - ) - most_recent_query = ( - TimedBelief.query.filter(TimedBelief.sensor == self) - .order_by(TimedBelief.event_start.desc()) - .limit(1) - ) - results = least_recent_query.union_all(most_recent_query).all() - if not results: - # return now in case there is no data for the sensor - now = server_now() - return dict(start=now, end=now) - least_recent, most_recent = results - return dict(start=least_recent.event_start, end=most_recent.event_end) + start, end = get_timerange([self.id]) + return dict(start=start, end=end) def __repr__(self) -> str: return f"" @@ -584,6 +570,14 @@ def __init__( source: tb.DBBeliefSource, **kwargs, ): + # get a Sensor instance attached to the database session (input sensor is detached) + # check out Issue #683 for more details + inspection_obj = inspect(sensor, raiseerr=False) + if ( + inspection_obj and inspection_obj.detached + ): # fetch Sensor only when it is detached + sensor = Sensor.query.get(sensor.id) + tb.TimedBeliefDBMixin.__init__(self, sensor, source, **kwargs) tb_utils.remove_class_init_kwargs(tb.TimedBeliefDBMixin, kwargs) db.Model.__init__(self, **kwargs) @@ -702,7 +696,7 @@ def search( # todo: compute median of collective belief instead of median of first belief (update expected test results accordingly) # todo: move to timely-beliefs: select mean/median belief if ( - bdf.lineage.number_of_sources == 1 + bdf.lineage.number_of_sources <= 1 and bdf.lineage.probabilistic_depth == 1 ): # Fast track, no need to loop over beliefs diff --git a/flexmeasures/data/queries/__init__.py 
b/flexmeasures/data/queries/__init__.py index e69de29bb..9f2f86f77 100644 --- a/flexmeasures/data/queries/__init__.py +++ b/flexmeasures/data/queries/__init__.py @@ -0,0 +1,3 @@ +""" +Data query functions +""" diff --git a/flexmeasures/data/queries/data_sources.py b/flexmeasures/data/queries/data_sources.py index 7352d7f38..e77e164f2 100644 --- a/flexmeasures/data/queries/data_sources.py +++ b/flexmeasures/data/queries/data_sources.py @@ -18,7 +18,7 @@ def get_or_create_source( model: str | None = None, flush: bool = True, ) -> DataSource: - return get_or_create_source_new(source, source_type, model, flush) + return get_or_create_source_new(source, source_type, model, flush=flush) @deprecated(get_source_or_none_new, "0.14") diff --git a/flexmeasures/data/schemas/__init__.py b/flexmeasures/data/schemas/__init__.py index 2d72e2f1f..64be51f77 100644 --- a/flexmeasures/data/schemas/__init__.py +++ b/flexmeasures/data/schemas/__init__.py @@ -1,3 +1,7 @@ +""" +Data schemas (Marshmallow) +""" + from .assets import LatitudeField, LongitudeField # noqa F401 from .generic_assets import GenericAssetIdField as AssetIdField # noqa F401 from .sensors import SensorIdField # noqa F401 diff --git a/flexmeasures/data/schemas/generic_assets.py b/flexmeasures/data/schemas/generic_assets.py index 469c29951..30867f21c 100644 --- a/flexmeasures/data/schemas/generic_assets.py +++ b/flexmeasures/data/schemas/generic_assets.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from marshmallow import validates, validates_schema, ValidationError, fields @@ -46,16 +48,26 @@ class Meta: @validates_schema(skip_on_field_errors=False) def validate_name_is_unique_in_account(self, data, **kwargs): - if "name" in data and "account_id" in data: - asset = GenericAsset.query.filter( - GenericAsset.name == data["name"] - and GenericAsset.account_id == data["account_id"] - ).one_or_none() - if asset: - raise ValidationError( - f"An asset with the name {data['name']} already exists in this 
account.", - "name", - ) + if "name" in data: + if data.get("account_id") is None: + asset = GenericAsset.query.filter( + GenericAsset.name == data["name"], GenericAsset.account_id.is_(None) + ).first() + if asset: + raise ValidationError( + f"A public asset with the name {data['name']} already exists.", + "name", + ) + else: + asset = GenericAsset.query.filter( + GenericAsset.name == data["name"], + GenericAsset.account_id == data["account_id"], + ).first() + if asset: + raise ValidationError( + f"An asset with the name {data['name']} already exists in this account.", + "name", + ) @validates("generic_asset_type_id") def validate_generic_asset_type(self, generic_asset_type_id: int): @@ -66,7 +78,11 @@ def validate_generic_asset_type(self, generic_asset_type_id: int): ) @validates("account_id") - def validate_account(self, account_id: int): + def validate_account(self, account_id: int | None): + if account_id is None and ( + running_as_cli() or user_has_admin_access(current_user, "update") + ): + return account = Account.query.get(account_id) if not account: raise ValidationError(f"Account with Id {account_id} doesn't exist.") diff --git a/flexmeasures/data/schemas/reporting/aggregation.py b/flexmeasures/data/schemas/reporting/aggregation.py new file mode 100644 index 000000000..a42c6d7b9 --- /dev/null +++ b/flexmeasures/data/schemas/reporting/aggregation.py @@ -0,0 +1,58 @@ +from marshmallow import fields, ValidationError, validates_schema + +from flexmeasures.data.schemas.reporting import ReporterConfigSchema + + +class AggregatorSchema(ReporterConfigSchema): + """Schema for the reporter_config of the AggregatorReporter + + Example: + .. 
code-block:: json + { + "beliefs_search_configs": [ + { + "sensor": 1, + "source" : 1, + "alias" : "pv" + }, + { + "sensor": 1, + "source" : 2, + "alias" : "consumption" + } + ], + "method" : "sum", + "weights" : { + "pv" : 1.0, + "consumption" : -1.0 + } + } + """ + + method = fields.Str(required=False, dump_default="sum") + weights = fields.Dict(fields.Str(), fields.Float(), required=False) + + @validates_schema + def validate_source(self, data, **kwargs): + + for beliefs_search_config in data["beliefs_search_configs"]: + if "source" not in beliefs_search_config: + raise ValidationError("`source` is a required field.") + + @validates_schema + def validate_weights(self, data, **kwargs): + if "weights" not in data: + return + + # get aliases + aliases = [] + for beliefs_search_config in data["beliefs_search_configs"]: + if "alias" in beliefs_search_config: + aliases.append(beliefs_search_config.get("alias")) + + # check that the aliases in weights are defined + for alias in data.get("weights").keys(): + if alias not in aliases: + raise ValidationError( + f"alias `{alias}` in `weights` is not defined in `beliefs_search_config`" + ) diff --git a/flexmeasures/data/schemas/scheduling/storage.py b/flexmeasures/data/schemas/scheduling/storage.py index e15843459..25f840900 100644 --- a/flexmeasures/data/schemas/scheduling/storage.py +++ b/flexmeasures/data/schemas/scheduling/storage.py @@ -19,6 +19,33 @@ from flexmeasures.utils.unit_utils import ur +class EfficiencyField(QuantityField): + """Field that deserializes to a Quantity with % units. Must be greater than 0% and less than or equal to 100%. + + Examples: + + >>> ef = EfficiencyField() + >>> ef.deserialize(0.9) + + >>> ef.deserialize("90%") + + >>> ef.deserialize("0%") + Traceback (most recent call last): + ... 
+ marshmallow.exceptions.ValidationError: ['Must be greater than 0 and less than or equal to 1.'] + """ + + def __init__(self, *args, **kwargs): + super().__init__( + "%", + validate=validate.Range( + min=0, max=1, min_inclusive=False, max_inclusive=True + ), + *args, + **kwargs, + ) + + class SOCValueSchema(Schema): """ A point in time with a target value. @@ -66,11 +93,8 @@ class StorageFlexModelSchema(Schema): data_key="soc-unit", ) # todo: allow unit to be set per field, using QuantityField("%", validate=validate.Range(min=0, max=1)) soc_targets = fields.List(fields.Nested(SOCValueSchema()), data_key="soc-targets") - roundtrip_efficiency = QuantityField( - "%", - validate=validate.Range(min=0, max=1, min_inclusive=False, max_inclusive=True), - data_key="roundtrip-efficiency", - ) + roundtrip_efficiency = EfficiencyField(data_key="roundtrip-efficiency") + storage_efficiency = EfficiencyField(data_key="storage-efficiency") prefer_charging_sooner = fields.Bool(data_key="prefer-charging-sooner") def __init__(self, start: datetime, sensor: Sensor, *args, **kwargs): @@ -110,10 +134,10 @@ def post_load_sequence(self, data: dict, **kwargs) -> dict: target["value"] /= 1000.0 data["soc_unit"] = "MWh" - # Convert round-trip efficiency to dimensionless (to the (0,1] range) - if data.get("roundtrip_efficiency") is not None: - data["roundtrip_efficiency"] = ( - data["roundtrip_efficiency"].to(ur.Quantity("dimensionless")).magnitude - ) + # Convert efficiencies to dimensionless (to the (0,1] range) + efficiency_fields = ("storage_efficiency", "roundtrip_efficiency") + for field in efficiency_fields: + if data.get(field) is not None: + data[field] = data[field].to(ur.Quantity("dimensionless")).magnitude return data diff --git a/flexmeasures/data/scripts/__init__.py b/flexmeasures/data/scripts/__init__.py index e69de29bb..83e94b860 100644 --- a/flexmeasures/data/scripts/__init__.py +++ b/flexmeasures/data/scripts/__init__.py @@ -0,0 +1,3 @@ +""" +Useful scripts +""" diff --git 
a/flexmeasures/data/services/__init__.py b/flexmeasures/data/services/__init__.py index e69de29bb..c7f8168cd 100644 --- a/flexmeasures/data/services/__init__.py +++ b/flexmeasures/data/services/__init__.py @@ -0,0 +1,3 @@ +""" +Business logic +""" diff --git a/flexmeasures/data/services/data_sources.py b/flexmeasures/data/services/data_sources.py index d9787f147..74eae3f56 100644 --- a/flexmeasures/data/services/data_sources.py +++ b/flexmeasures/data/services/data_sources.py @@ -13,6 +13,7 @@ def get_or_create_source( source_type: str | None = None, model: str | None = None, version: str | None = None, + attributes: dict | None = None, flush: bool = True, ) -> DataSource: if is_user(source): @@ -22,6 +23,10 @@ def get_or_create_source( query = query.filter(DataSource.model == model) if version is not None: query = query.filter(DataSource.version == version) + if attributes is not None: + query = query.filter( + DataSource.attributes_hash == DataSource.hash_attributes(attributes) + ) if is_user(source): query = query.filter(DataSource.user == source) elif isinstance(source, str): @@ -36,7 +41,11 @@ def get_or_create_source( if source_type is None: raise TypeError("Please specify a source type") _source = DataSource( - name=source, model=model, version=version, type=source_type + name=source, + model=model, + version=version, + type=source_type, + attributes=attributes, ) current_app.logger.info(f"Setting up {_source} as new data source...") db.session.add(_source) diff --git a/flexmeasures/data/services/forecasting.py b/flexmeasures/data/services/forecasting.py index edb9e537d..cb33bef68 100644 --- a/flexmeasures/data/services/forecasting.py +++ b/flexmeasures/data/services/forecasting.py @@ -1,3 +1,7 @@ +""" +Logic around scheduling (jobs) +""" + from __future__ import annotations from datetime import datetime, timedelta diff --git a/flexmeasures/data/services/scheduling.py b/flexmeasures/data/services/scheduling.py index b9f82a111..526085874 100644 --- 
a/flexmeasures/data/services/scheduling.py +++ b/flexmeasures/data/services/scheduling.py @@ -1,3 +1,7 @@ +""" +Logic around scheduling (jobs) +""" + from __future__ import annotations from datetime import datetime, timedelta diff --git a/flexmeasures/data/services/timerange.py b/flexmeasures/data/services/timerange.py new file mode 100644 index 000000000..d894ed4bf --- /dev/null +++ b/flexmeasures/data/services/timerange.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +from datetime import datetime + +from sqlalchemy import func + +from flexmeasures.utils import time_utils + + +def get_timerange(sensor_ids: list[int]) -> tuple[datetime, datetime]: + """Get the start and end of the least recent and most recent event, respectively. + + In case of no data, defaults to (now, now). + """ + from flexmeasures.data.models.time_series import Sensor, TimedBelief + + least_recent_event_start_and_most_recent_event_end = ( + TimedBelief.query.with_entities( + # least recent event start + func.min(TimedBelief.event_start), + # most recent event end + func.max(TimedBelief.event_start + Sensor.event_resolution), + ) + .join(Sensor, TimedBelief.sensor_id == Sensor.id) + .filter(TimedBelief.sensor_id.in_(sensor_ids)) + ).one_or_none() + if least_recent_event_start_and_most_recent_event_end == (None, None): + # return now in case there is no data for any of the sensors + now = time_utils.server_now() + return now, now + return least_recent_event_start_and_most_recent_event_end diff --git a/flexmeasures/data/services/users.py b/flexmeasures/data/services/users.py index 3a331a3d8..ed786d0e4 100644 --- a/flexmeasures/data/services/users.py +++ b/flexmeasures/data/services/users.py @@ -8,10 +8,10 @@ from flask_security.recoverable import update_password from email_validator import ( validate_email, - validate_email_deliverability, EmailNotValidError, EmailUndeliverableError, ) +from email_validator.deliverability import validate_email_deliverability from flask_security.utils 
import hash_password from werkzeug.exceptions import NotFound diff --git a/flexmeasures/data/tests/test_scheduling_jobs.py b/flexmeasures/data/tests/test_scheduling_jobs.py index a488fb98c..cdd2f1a4a 100644 --- a/flexmeasures/data/tests/test_scheduling_jobs.py +++ b/flexmeasures/data/tests/test_scheduling_jobs.py @@ -37,6 +37,10 @@ def test_scheduling_a_battery(db, app, add_battery_assets, setup_test_data): end=end, belief_time=start, resolution=resolution, + flex_model={ + "roundtrip-efficiency": "98%", + "storage-efficiency": 0.999, + }, ) print("Job: %s" % job.id) @@ -57,6 +61,9 @@ def test_scheduling_a_battery(db, app, add_battery_assets, setup_test_data): ) print([v.event_value for v in power_values]) assert len(power_values) == 96 + assert ( + sum(v.event_value for v in power_values) < -0.5 + ), "some cycling should have occurred to make a profit, resulting in overall consumption due to losses" scheduler_specs = { diff --git a/flexmeasures/data/tests/test_scheduling_jobs_fresh_db.py b/flexmeasures/data/tests/test_scheduling_jobs_fresh_db.py index ca125614f..02b966a62 100644 --- a/flexmeasures/data/tests/test_scheduling_jobs_fresh_db.py +++ b/flexmeasures/data/tests/test_scheduling_jobs_fresh_db.py @@ -42,7 +42,12 @@ def test_scheduling_a_charging_station( end=end, belief_time=start, resolution=resolution, - flex_model={"soc-at-start": soc_at_start, "soc-targets": soc_targets}, + flex_model={ + "soc-at-start": soc_at_start, + "soc-targets": soc_targets, + "roundtrip-efficiency": "100%", + "storage-efficiency": 1, + }, ) print("Job: %s" % job.id) diff --git a/flexmeasures/data/utils.py b/flexmeasures/data/utils.py index 194f8b10b..4985ab421 100644 --- a/flexmeasures/data/utils.py +++ b/flexmeasures/data/utils.py @@ -1,3 +1,7 @@ +""" +Utils around the data models and db sessions +""" + from __future__ import annotations from flask import current_app diff --git a/flexmeasures/tests/__init__.py b/flexmeasures/tests/__init__.py new file mode 100644 index 
000000000..b4b63d249 --- /dev/null +++ b/flexmeasures/tests/__init__.py @@ -0,0 +1 @@ +""" Higher-level tests """ diff --git a/flexmeasures/config/tests/test_package.py b/flexmeasures/tests/test_highlevel_class_imports.py similarity index 100% rename from flexmeasures/config/tests/test_package.py rename to flexmeasures/tests/test_highlevel_class_imports.py diff --git a/flexmeasures/ui/__init__.py b/flexmeasures/ui/__init__.py index 0561e267c..eb205d686 100644 --- a/flexmeasures/ui/__init__.py +++ b/flexmeasures/ui/__init__.py @@ -1,3 +1,7 @@ +""" +Backoffice user interface & charting support. +""" + import os from flask import current_app, Flask, Blueprint diff --git a/flexmeasures/ui/crud/__init__.py b/flexmeasures/ui/crud/__init__.py index e69de29bb..3a4cbd432 100644 --- a/flexmeasures/ui/crud/__init__.py +++ b/flexmeasures/ui/crud/__init__.py @@ -0,0 +1,3 @@ +""" +Backoffice UI for CRUD functionality +""" diff --git a/flexmeasures/ui/templates/base.html b/flexmeasures/ui/templates/base.html index edbf456ee..3198b4a8f 100644 --- a/flexmeasures/ui/templates/base.html +++ b/flexmeasures/ui/templates/base.html @@ -11,6 +11,9 @@ {% endblock head %} + {% if flask_env != 'development' and flask_env != 'testing' %} + + {% endif %} diff --git a/flexmeasures/ui/utils/__init__.py b/flexmeasures/ui/utils/__init__.py index e69de29bb..f886eb4d6 100644 --- a/flexmeasures/ui/utils/__init__.py +++ b/flexmeasures/ui/utils/__init__.py @@ -0,0 +1,3 @@ +""" +Utility functions for UI logic +""" diff --git a/flexmeasures/ui/utils/view_utils.py b/flexmeasures/ui/utils/view_utils.py index ae622ba77..0c9bf07da 100644 --- a/flexmeasures/ui/utils/view_utils.py +++ b/flexmeasures/ui/utils/view_utils.py @@ -25,6 +25,7 @@ def render_flexmeasures_template(html_filename: str, **variables): """Render template and add all expected template variables, plus the ones given as **variables.""" + variables["flask_env"] = current_app.env variables["documentation_exists"] = False if os.path.exists( 
"%s/static/documentation/html/index.html" % flexmeasures_ui.root_path diff --git a/flexmeasures/utils/app_utils.py b/flexmeasures/utils/app_utils.py index 394c991e3..5d53a59d5 100644 --- a/flexmeasures/utils/app_utils.py +++ b/flexmeasures/utils/app_utils.py @@ -1,3 +1,7 @@ +""" +Utils for serving the FlexMeasures app +""" + from __future__ import annotations import os diff --git a/flexmeasures/utils/calculations.py b/flexmeasures/utils/calculations.py index f85813fa7..993fc6cc6 100644 --- a/flexmeasures/utils/calculations.py +++ b/flexmeasures/utils/calculations.py @@ -1,7 +1,8 @@ -""" Calculations """ +""" Various calculations """ from __future__ import annotations from datetime import timedelta +import math import numpy as np import pandas as pd @@ -37,11 +38,76 @@ def drop_nan_rows(a, b): return d[:, 0], d[:, 1] +def apply_stock_changes_and_losses( + initial: float, + changes: list[float], + storage_efficiency: float | list[float], + how: str = "linear", + decimal_precision: int | None = None, +) -> list[float]: + r"""Assign stock changes and determine losses from storage efficiency. + + The initial stock is exponentially decayed, as with each consecutive (constant-resolution) time step, + some constant percentage of the previous stock remains. For example: + + .. math:: + + 100 \rightarrow 90 \rightarrow 81 \rightarrow 72.9 \rightarrow ... + + For computing the decay of the changes, we make an assumption on how a delta :math:`d` is distributed within a given time step. + In case it happens at a constant rate, this leads to a linear stock change from one time step to the next. + + An :math:`e` is introduced when we apply exponential decay to that. + To see that, imagine we cut one time step in :math:`n` pieces (each with a stock change :math:`\frac{d}{n}` ), + apply the efficiency to each piece :math:`k` (for the corresponding fraction of the time step :math:`k/n`), + and then take the limit :math:`n \rightarrow \infty`: + + .. 
math:: + + \lim_{n \rightarrow \infty} \sum_{k=0}^{n}{\frac{d}{n} \eta^{k/n}} + + `which is `_: + + .. math:: + + d \cdot \frac{\eta - 1}{e^{\eta}} + + :param initial: initial stock + :param changes: stock change for each step + :param storage_efficiency: ratio of stock left after a step (constant ratio or one per step) + :param how: left, right or linear; how stock changes should be applied, which affects how losses are applied + :param decimal_precision: Optional decimal precision to round off results (useful for tests failing over machine precision) + """ + stocks = [initial] + if not isinstance(storage_efficiency, list): + storage_efficiency = [storage_efficiency] * len(changes) + for d, e in zip(changes, storage_efficiency): + s = stocks[-1] + if e == 1: + next_stock = s + d + elif how == "left": + # First apply the stock change, then apply the losses (i.e. the stock changes on the left side of the time interval in which the losses apply) + next_stock = (s + d) * e + elif how == "right": + # First apply the losses, then apply the stock change (i.e. 
the stock changes on the right side of the time interval in which the losses apply) + next_stock = s * e + d + elif how == "linear": + # Assume the change happens at a constant rate, leading to a linear stock change, and exponential decay, within the current interval + next_stock = s * e + d * (e - 1) / math.log(e) + else: + raise NotImplementedError(f"Missing implementation for how='{how}'.") + stocks.append(next_stock) + if decimal_precision is not None: + stocks = [round(s, decimal_precision) for s in stocks] + return stocks + + def integrate_time_series( series: pd.Series, initial_stock: float, up_efficiency: float | pd.Series = 1, down_efficiency: float | pd.Series = 1, + storage_efficiency: float | pd.Series = 1, decimal_precision: int | None = None, ) -> pd.Series: """Integrate time series of length n and inclusive="left" (representing a flow) @@ -69,25 +135,42 @@ def integrate_time_series( dtype: float64 """ resolution = pd.to_timedelta(series.index.freq) + storage_efficiency = ( + storage_efficiency + if isinstance(storage_efficiency, pd.Series) + else pd.Series(storage_efficiency, index=series.index) + ) + + # Convert from flow to stock change, applying conversion efficiencies stock_change = pd.Series(data=np.NaN, index=series.index) - stock_change.loc[series > 0] = series[series > 0] * ( - up_efficiency[series > 0] - if isinstance(up_efficiency, pd.Series) - else up_efficiency + stock_change.loc[series > 0] = ( + series[series > 0] + * ( + up_efficiency[series > 0] + if isinstance(up_efficiency, pd.Series) + else up_efficiency + ) + * (resolution / timedelta(hours=1)) ) - stock_change.loc[series <= 0] = series[series <= 0] / ( - down_efficiency[series <= 0] - if isinstance(down_efficiency, pd.Series) - else down_efficiency + stock_change.loc[series <= 0] = ( + series[series <= 0] + / ( + down_efficiency[series <= 0] + if isinstance(down_efficiency, pd.Series) + else down_efficiency + ) + * (resolution / timedelta(hours=1)) + ) + + stocks = 
apply_stock_changes_and_losses( + initial_stock, stock_change.tolist(), storage_efficiency.tolist() ) - int_s = pd.concat( + stocks = pd.concat( [ pd.Series(initial_stock, index=pd.date_range(series.index[0], periods=1)), - stock_change.shift(1, freq=resolution).cumsum() - * (resolution / timedelta(hours=1)) - + initial_stock, + pd.Series(stocks[1:], index=series.index).shift(1, freq=resolution), ] ) if decimal_precision is not None: - int_s = int_s.round(decimal_precision) - return int_s + stocks = stocks.round(decimal_precision) + return stocks diff --git a/flexmeasures/utils/coding_utils.py b/flexmeasures/utils/coding_utils.py index 7856c87d3..3994a7b72 100644 --- a/flexmeasures/utils/coding_utils.py +++ b/flexmeasures/utils/coding_utils.py @@ -1,3 +1,4 @@ +""" Various coding utils (e.g. around function decoration) """ from __future__ import annotations import functools @@ -126,9 +127,11 @@ def sort_dict(unsorted_dict: dict) -> dict: def flatten_unique(nested_list_of_objects: list) -> list: """Returns unique objects in a possibly nested (one level) list of objects. + Preserves the original order in which unique objects first occurred. 
+ For example: - >>> flatten_unique([1, [2, 3, 4], 3, 5]) - <<< [1, 2, 3, 4, 5] + >>> flatten_unique([1, [2, 20, 6], 10, [6, 2]]) + <<< [1, 2, 20, 6, 10] """ all_objects = [] for s in nested_list_of_objects: @@ -136,7 +139,7 @@ def flatten_unique(nested_list_of_objects: list) -> list: all_objects.extend(s) else: all_objects.append(s) - return list(set(all_objects)) + return list(dict.fromkeys(all_objects).keys()) def timeit(func): @@ -173,34 +176,47 @@ def wrapper(*args, **kwargs): return decorator -def find_classes_module(module, superclass, skiptest=True): +def find_classes_module(module, superclass): + classes = [] + + module_object = importlib.import_module(f"{module}") + module_classes = inspect.getmembers(module_object, inspect.isclass) + + classes.extend( + [ + (class_name, klass) + for class_name, klass in module_classes + if issubclass(klass, superclass) and klass != superclass + ] + ) + + return classes + + +def find_classes_modules(module, superclass, skiptest=True): classes = [] - reporting_module = importlib.import_module(module) - for submodule in pkgutil.iter_modules(reporting_module.__path__): + base_module = importlib.import_module(module) + + # root (__init__.py) of the base module + classes += find_classes_module(module, superclass) + + for submodule in pkgutil.iter_modules(base_module.__path__): if skiptest and ("test" in f"{module}.{submodule.name}"): continue if submodule.ispkg: classes.extend( - find_classes_module( + find_classes_modules( f"{module}.{submodule.name}", superclass, skiptest=skiptest ) ) else: - module_object = importlib.import_module(f"{module}.{submodule.name}") - module_classes = inspect.getmembers(module_object, inspect.isclass) - classes.extend( - [ - (class_name, klass) - for class_name, klass in module_classes - if issubclass(klass, superclass) and klass != superclass - ] - ) + classes += find_classes_module(f"{module}.{submodule.name}", superclass) return classes def get_classes_module(module, superclass, skiptest=True) 
-> dict: - return dict(find_classes_module(module, superclass, skiptest=skiptest)) + return dict(find_classes_modules(module, superclass, skiptest=skiptest)) diff --git a/flexmeasures/utils/config_defaults.py b/flexmeasures/utils/config_defaults.py index a8f10180a..cd5092cad 100644 --- a/flexmeasures/utils/config_defaults.py +++ b/flexmeasures/utils/config_defaults.py @@ -1,14 +1,16 @@ -from __future__ import annotations - -from datetime import timedelta -import logging - """ -This lays out our configuration requirements and allows to set trivial defaults, per environment adjustable. +Our configuration requirements and defaults + +This can be adjusted per environment here. Anything confidential should be handled outside of source control (e.g. a SECRET KEY file is generated on first install, and confidential settings can be set via the -conf.py file. """ +from __future__ import annotations + +from datetime import timedelta +import logging + class Config(object): """ diff --git a/flexmeasures/utils/config_utils.py b/flexmeasures/utils/config_utils.py index 0265ebb28..9f7ac6acb 100644 --- a/flexmeasures/utils/config_utils.py +++ b/flexmeasures/utils/config_utils.py @@ -1,3 +1,7 @@ +""" +Reading in configuration +""" + from __future__ import annotations import os @@ -81,7 +85,7 @@ def read_config(app: Flask, custom_path_to_config: str | None): path_to_config_home = str(Path.home().joinpath(".flexmeasures.cfg")) path_to_config_instance = os.path.join(app.instance_path, "flexmeasures.cfg") - # Custom config: not when testing (that should run completely on defaults) + # Custom config: do not use any when testing (that should run completely on defaults) if not app.testing: used_path_to_config = read_custom_config( app, custom_path_to_config, path_to_config_home, path_to_config_instance diff --git a/flexmeasures/utils/error_utils.py b/flexmeasures/utils/error_utils.py index faa8544de..131d970af 100644 --- a/flexmeasures/utils/error_utils.py +++ 
b/flexmeasures/utils/error_utils.py @@ -1,3 +1,7 @@ +""" +Utils for handling of errors +""" + import re import sys import traceback diff --git a/flexmeasures/utils/flexmeasures_inflection.py b/flexmeasures/utils/flexmeasures_inflection.py index eaa825cad..dbc7386b3 100644 --- a/flexmeasures/utils/flexmeasures_inflection.py +++ b/flexmeasures/utils/flexmeasures_inflection.py @@ -1,3 +1,5 @@ +""" FlexMeasures way of handling inflection """ + from __future__ import annotations import re diff --git a/flexmeasures/utils/plugin_utils.py b/flexmeasures/utils/plugin_utils.py index 6cab82062..19568b48a 100644 --- a/flexmeasures/utils/plugin_utils.py +++ b/flexmeasures/utils/plugin_utils.py @@ -1,3 +1,7 @@ +""" +Utils for registering FlexMeasures plugins +""" + from __future__ import annotations import importlib.util @@ -102,10 +106,12 @@ def register_plugins(app: Flask): app.logger.debug(f"Registering {plugin_blueprint} ...") app.register_blueprint(plugin_blueprint) - # Loading reporters + # Load reporters and schedulers from flexmeasures.data.models.reporting import Reporter + from flexmeasures.data.models.planning import Scheduler app.reporters.update(get_classes_module(module.__name__, Reporter)) + app.schedulers.update(get_classes_module(module.__name__, Scheduler)) app.config["LOADED_PLUGINS"][plugin_name] = plugin_version app.logger.info(f"Loaded plugins: {app.config['LOADED_PLUGINS']}") diff --git a/flexmeasures/utils/time_utils.py b/flexmeasures/utils/time_utils.py index b0a490c4b..5a7b4c880 100644 --- a/flexmeasures/utils/time_utils.py +++ b/flexmeasures/utils/time_utils.py @@ -1,3 +1,7 @@ +""" +Utils for dealing with time +""" + from __future__ import annotations import re diff --git a/flexmeasures/utils/unit_utils.py b/flexmeasures/utils/unit_utils.py index 4edd67460..13a110050 100644 --- a/flexmeasures/utils/unit_utils.py +++ b/flexmeasures/utils/unit_utils.py @@ -200,6 +200,7 @@ def is_energy_price_unit(unit: str) -> bool: """ if ( unit[:3] in [str(c) for c in 
list_all_currencies()] + and len(unit) > 3 and unit[3] == "/" and is_energy_unit(unit[4:]) ): diff --git a/requirements/app.in b/requirements/app.in index af292a2c1..2809f4ec9 100644 --- a/requirements/app.in +++ b/requirements/app.in @@ -9,7 +9,9 @@ iso8601 xlrd workalendar inflection -inflect +# <6.0.2 and pydantic due to https://github.com/jaraco/inflect/issues/187 +inflect<=6.0.2 +pydantic<2 humanize psycopg2-binary bcrypt @@ -21,6 +23,7 @@ click click-default-group email_validator rq +# rq-dashboard-compatible # use this if compatibility with Flask 2.3 is not done rq-dashboard # the following uses environment markers (see PEP 496) rq-win; os_name == 'nt' or os_name == 'win' @@ -29,23 +32,28 @@ redis >4.5, <5 tldextract pyomo>=5.6 tabulate -timetomodel>=0.7.1 -timely-beliefs[forecast]>=1.18 +timetomodel>=0.7.3 +timely-beliefs[forecast]>=1.20.1 python-dotenv # a backport, not needed in Python3.8 importlib_metadata # see GH#607 for issue on this pin sqlalchemy>=1.4.0, <2 Flask-SSLify +# use <0.4 if not compatible with Flask2.2 yet Flask_JSON Flask-Migrate Flask-WTF Flask-Mail -Flask-Security-Too>=5.0 -# This pin is tough to debug, but logging in (in API) stops working at 0.6.2. Maybe Flask 2.2 will help resolve this. -Flask-Login <= 0.6.1 -Flask-Classful -Flask-Marshmallow +# <5.2: https://github.com/Parallels/rq-dashboard/issues/417 as 5.2 requires Flask 2.3 +Flask-Security-Too>=5.0, <5.2 +# This pin is tough to debug, but logging in (in API) stops working at 0.6.2. +# On repeated call to API with auth token, user is anonymous. 
+# See test_get_one_user or test_api_task_run_post_unauthorized_wrong_role for a good example +Flask-Login<0.6.2 +Flask-Classful @ git+https://github.com/superseed/flask-classful@werkzeug-2.2 +# <0.15: https://github.com/marshmallow-code/flask-marshmallow/issues/262 +Flask-Marshmallow<0.15 Flask-Cors sentry-sdk[flask] marshmallow>=3 @@ -57,5 +65,6 @@ uniplot>=0.7.0 # Maximum constraints here due to Flask-Classful not supporting Werkzeug 2.2.0 yet, see GH#595 and https://github.com/teracyhq/flask-classful/pull/145 Flask-SQLAlchemy>=2.4.3,<3 # flask should be after all the flask plugins, because setup might find they ARE flask -flask>=1.0,<=2.1.2 -werkzeug <2.1 +# <2.3: https://github.com/Parallels/rq-dashboard/issues/417 and https://github.com/FlexMeasures/flexmeasures/issues/754 and flask-login 0.6.1 not compatible +flask>=1.0, <=2.1.2 +werkzeug<=2.1 diff --git a/requirements/app.txt b/requirements/app.txt index 215052b2e..340a23f4d 100644 --- a/requirements/app.txt +++ b/requirements/app.txt @@ -1,30 +1,30 @@ # -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/app.txt requirements/app.in # -alembic==1.10.2 +alembic==1.11.1 # via flask-migrate -altair==4.2.2 +altair==5.0.1 # via -r requirements/app.in arrow==1.2.3 # via rq-dashboard async-timeout==4.0.2 # via redis -attrs==22.2.0 +attrs==23.1.0 # via jsonschema babel==2.12.1 # via py-moneyed bcrypt==4.0.1 # via -r requirements/app.in -blinker==1.5 +blinker==1.6.2 # via # flask-mail # flask-principal # flask-security-too # sentry-sdk -certifi==2022.12.7 +certifi==2023.5.7 # via # requests # sentry-sdk @@ -40,25 +40,23 @@ click-default-group==1.2.2 # via -r requirements/app.in colour==0.1.5 # via -r requirements/app.in -contourpy==1.0.7 +contourpy==1.1.0 # via matplotlib convertdate==2.4.0 # via workalendar cycler==0.11.0 # via matplotlib 
-deprecated==1.2.13 +deprecated==1.2.14 # via sktime dill==0.3.6 # via openturns dnspython==2.3.0 # via email-validator -email-validator==1.3.1 +email-validator==2.0.0.post2 # via # -r requirements/app.in # flask-security-too -entrypoints==0.4 - # via altair -filelock==3.10.7 +filelock==3.12.2 # via tldextract flask==2.1.2 # via @@ -77,9 +75,9 @@ flask==2.1.2 # flask-wtf # rq-dashboard # sentry-sdk -flask-classful==0.14.2 +flask-classful @ git+https://github.com/superseed/flask-classful@werkzeug-2.2 # via -r requirements/app.in -flask-cors==3.0.10 +flask-cors==4.0.0 # via -r requirements/app.in flask-json==0.3.5 # via -r requirements/app.in @@ -107,29 +105,26 @@ flask-wtf==1.1.1 # via # -r requirements/app.in # flask-security-too -fonttools==4.39.3 +fonttools==4.40.0 # via matplotlib greenlet==2.0.2 # via sqlalchemy -humanize==4.6.0 +humanize==4.7.0 # via -r requirements/app.in idna==3.4 # via # email-validator # requests # tldextract -importlib-metadata==6.1.0 +importlib-metadata==6.7.0 # via # -r requirements/app.in - # flask # timely-beliefs -importlib-resources==5.12.0 - # via matplotlib inflect==6.0.2 # via -r requirements/app.in inflection==0.5.1 # via -r requirements/app.in -iso8601==1.1.0 +iso8601==2.0.0 # via -r requirements/app.in isodate==0.6.1 # via @@ -144,22 +139,21 @@ jinja2==3.1.2 # via # altair # flask -joblib==1.2.0 +joblib==1.3.1 # via scikit-learn jsonschema==4.17.3 # via altair kiwisolver==1.4.4 # via matplotlib -llvmlite==0.39.1 - # via numba lunardate==0.2.0 # via workalendar mako==1.2.4 # via alembic -markupsafe==2.1.2 +markupsafe==2.1.3 # via # jinja2 # mako + # sentry-sdk # wtforms marshmallow==3.19.0 # via @@ -174,15 +168,12 @@ marshmallow-sqlalchemy==0.29.0 # via -r requirements/app.in matplotlib==3.7.1 # via timetomodel -numba==0.56.4 - # via sktime numpy==1.23.5 # via # -r requirements/app.in # altair # contourpy # matplotlib - # numba # pandas # patsy # properscoring @@ -193,16 +184,17 @@ numpy==1.23.5 # timely-beliefs # timetomodel # 
uniplot -openturns==1.20.post3 +openturns==1.21 # via timely-beliefs -packaging==23.0 +packaging==23.1 # via # marshmallow # marshmallow-sqlalchemy # matplotlib + # sktime # statsmodels # webargs -pandas==1.5.3 +pandas==2.0.3 # via # -r requirements/app.in # altair @@ -214,9 +206,9 @@ passlib==1.7.4 # via flask-security-too patsy==0.5.3 # via statsmodels -pillow==9.4.0 +pillow==9.5.0 # via matplotlib -pint==0.20.1 +pint==0.22 # via -r requirements/app.in ply==3.11 # via pyomo @@ -224,23 +216,25 @@ properscoring==0.1 # via timely-beliefs pscript==0.7.7 # via -r requirements/app.in -psutil==5.9.4 +psutil==5.9.5 # via openturns -psycopg2-binary==2.9.5 +psycopg2-binary==2.9.6 # via # -r requirements/app.in # timely-beliefs py-moneyed==3.0 # via -r requirements/app.in -pydantic==1.10.7 - # via inflect +pydantic==1.10.10 + # via + # -r requirements/app.in + # inflect pyluach==2.2.0 # via workalendar pymeeus==0.5.12 # via convertdate -pyomo==6.5.0 +pyomo==6.6.1 # via -r requirements/app.in -pyparsing==3.0.9 +pyparsing==3.1.0 # via matplotlib pyrsistent==0.19.3 # via jsonschema @@ -259,28 +253,30 @@ pytz==2023.3 # pandas # timely-beliefs # timetomodel -redis==4.5.4 +redis==4.6.0 # via # -r requirements/app.in # rq # rq-dashboard -requests==2.28.2 +requests==2.31.0 # via # requests-file # tldextract requests-file==1.5.1 # via tldextract -rq==1.13.0 +rq==1.15.1 # via # -r requirements/app.in # rq-dashboard rq-dashboard==0.6.1 # via -r requirements/app.in +scikit-base==0.5.0 + # via sktime scikit-learn==1.2.2 # via # sktime # timetomodel -scipy==1.10.1 +scipy==1.11.1 # via # properscoring # scikit-learn @@ -288,19 +284,18 @@ scipy==1.10.1 # statsmodels # timely-beliefs # timetomodel -sentry-sdk[flask]==1.18.0 +sentry-sdk[flask]==1.26.0 # via -r requirements/app.in six==1.16.0 # via - # flask-cors # flask-marshmallow # isodate # patsy # python-dateutil # requests-file -sktime==0.16.1 +sktime==0.20.0 # via timely-beliefs -sqlalchemy==1.4.47 +sqlalchemy==1.4.48 # via # -r 
requirements/app.in # alembic @@ -308,34 +303,38 @@ sqlalchemy==1.4.47 # marshmallow-sqlalchemy # timely-beliefs # timetomodel -statsmodels==0.13.5 +statsmodels==0.14.0 # via timetomodel tabulate==0.9.0 # via -r requirements/app.in threadpoolctl==3.1.0 # via scikit-learn -timely-beliefs[forecast]==1.19.0 +timely-beliefs[forecast]==1.21.0 # via -r requirements/app.in -timetomodel==0.7.2 +timetomodel==0.7.3 # via -r requirements/app.in -tldextract==3.4.0 +tldextract==3.4.4 # via -r requirements/app.in toolz==0.12.0 # via altair -typing-extensions==4.5.0 +typing-extensions==4.7.0 # via # alembic + # altair + # pint # py-moneyed # pydantic +tzdata==2023.3 + # via pandas uniplot==0.10.0 # via -r requirements/app.in -urllib3==1.26.15 +urllib3==2.0.3 # via # requests # sentry-sdk webargs==8.2.0 # via -r requirements/app.in -werkzeug==2.0.3 +werkzeug==2.1.0 # via # -r requirements/app.in # flask @@ -351,9 +350,7 @@ wtforms==3.0.1 xlrd==2.0.1 # via -r requirements/app.in zipp==3.15.0 - # via - # importlib-metadata - # importlib-resources + # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/dev.in b/requirements/dev.in index e1f392c32..63c71088b 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -10,4 +10,5 @@ flake8-blind-except mypy pytest-runner setuptools_scm -watchdog \ No newline at end of file +watchdog +pyinstrument \ No newline at end of file diff --git a/requirements/dev.txt b/requirements/dev.txt index 6e019a511..332320639 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/dev.txt requirements/dev.in # @@ -15,7 +15,7 @@ click==8.1.3 # black distlib==0.3.6 # via virtualenv -filelock==3.10.7 +filelock==3.12.2 # via # -c 
requirements/app.txt # virtualenv @@ -23,35 +23,37 @@ flake8==4.0.1 # via -r requirements/dev.in flake8-blind-except==0.2.1 # via -r requirements/dev.in -identify==2.5.22 +identify==2.5.24 # via pre-commit mccabe==0.6.1 # via flake8 -mypy==1.1.1 +mypy==1.4.1 # via -r requirements/dev.in mypy-extensions==1.0.0 # via # black # mypy -nodeenv==1.7.0 +nodeenv==1.8.0 # via pre-commit -packaging==23.0 +packaging==23.1 # via # -c requirements/app.txt # -c requirements/test.txt # setuptools-scm pathspec==0.11.1 # via black -platformdirs==3.2.0 +platformdirs==3.8.0 # via # black # virtualenv -pre-commit==3.2.1 +pre-commit==3.3.3 # via -r requirements/dev.in pycodestyle==2.8.0 # via flake8 pyflakes==2.4.0 # via flake8 +pyinstrument==4.5.0 + # via -r requirements/dev.in pytest-runner==6.0.0 # via -r requirements/dev.in pyyaml==6.0 @@ -64,13 +66,12 @@ tomli==2.0.1 # black # mypy # setuptools-scm -typing-extensions==4.5.0 +typing-extensions==4.7.0 # via # -c requirements/app.txt - # black # mypy # setuptools-scm -virtualenv==20.21.0 +virtualenv==20.23.1 # via pre-commit watchdog==3.0.0 # via -r requirements/dev.in diff --git a/requirements/docs.txt b/requirements/docs.txt index f185451c4..722bf528c 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/docs.txt requirements/docs.in # @@ -10,7 +10,7 @@ babel==2.12.1 # via # -c requirements/app.txt # sphinx -certifi==2022.12.7 +certifi==2023.5.7 # via # -c requirements/app.txt # requests @@ -29,27 +29,23 @@ idna==3.4 # requests imagesize==1.4.1 # via sphinx -importlib-metadata==6.1.0 - # via - # -c requirements/app.txt - # sphinx jinja2==3.1.2 # via # -c requirements/app.txt # sphinx -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/app.txt # jinja2 -packaging==23.0 +packaging==23.1 
# via # -c requirements/app.txt # sphinx -pygments==2.14.0 +pygments==2.15.1 # via # sphinx # sphinx-tabs -requests==2.28.2 +requests==2.31.0 # via # -c requirements/app.txt # sphinx @@ -59,7 +55,7 @@ six==1.16.0 # sphinxcontrib-httpdomain snowballstemmer==2.2.0 # via sphinx -sphinx==6.1.3 +sphinx==6.2.1 # via # -r requirements/docs.in # sphinx-copybutton @@ -68,11 +64,11 @@ sphinx==6.1.3 # sphinx-tabs # sphinxcontrib-httpdomain # sphinxcontrib-jquery -sphinx-copybutton==0.5.1 +sphinx-copybutton==0.5.2 # via -r requirements/docs.in sphinx-fontawesome==0.0.6 # via -r requirements/docs.in -sphinx-rtd-theme==1.2.0 +sphinx-rtd-theme==1.2.2 # via -r requirements/docs.in sphinx-tabs==3.4.1 # via -r requirements/docs.in @@ -92,11 +88,7 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -urllib3==1.26.15 +urllib3==2.0.3 # via # -c requirements/app.txt # requests -zipp==3.15.0 - # via - # -c requirements/app.txt - # importlib-metadata diff --git a/requirements/test.txt b/requirements/test.txt index 827b7d973..3dc640eaa 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile --output-file=requirements/test.txt requirements/test.in # @@ -8,11 +8,7 @@ async-timeout==4.0.2 # via # -c requirements/app.txt # redis -attrs==22.2.0 - # via - # -c requirements/app.txt - # pytest -certifi==2022.12.7 +certifi==2023.5.7 # via # -c requirements/app.txt # requests @@ -24,11 +20,11 @@ click==8.1.3 # via # -c requirements/app.txt # flask -coverage[toml]==7.2.2 +coverage[toml]==7.2.7 # via pytest-cov exceptiongroup==1.1.1 # via pytest -fakeredis==2.10.2 +fakeredis==2.15.0 # via -r requirements/test.in flask==2.1.2 # via @@ -38,10 +34,6 @@ idna==3.4 # via # -c requirements/app.txt # requests -importlib-metadata==6.1.0 - # via - # -c 
requirements/app.txt - # flask iniconfig==2.0.0 # via pytest itsdangerous==2.1.2 @@ -52,41 +44,41 @@ jinja2==3.1.2 # via # -c requirements/app.txt # flask -lupa==1.14.1 +lupa==2.0 # via -r requirements/test.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/app.txt # jinja2 -packaging==23.0 +packaging==23.1 # via # -c requirements/app.txt # pytest # pytest-sugar -pluggy==1.0.0 +pluggy==1.2.0 # via pytest -pytest==7.2.2 +pytest==7.4.0 # via # -r requirements/test.in # pytest-cov # pytest-flask # pytest-sugar -pytest-cov==4.0.0 +pytest-cov==4.1.0 # via -r requirements/test.in pytest-flask==1.2.0 # via -r requirements/test.in -pytest-sugar==0.9.6 +pytest-sugar==0.9.7 # via -r requirements/test.in -redis==4.5.4 +redis==4.6.0 # via # -c requirements/app.txt # fakeredis -requests==2.28.2 +requests==2.31.0 # via # -c requirements/app.txt # -r requirements/test.in # requests-mock -requests-mock==1.10.0 +requests-mock==1.11.0 # via -r requirements/test.in six==1.16.0 # via @@ -94,22 +86,18 @@ six==1.16.0 # requests-mock sortedcontainers==2.4.0 # via fakeredis -termcolor==2.2.0 +termcolor==2.3.0 # via pytest-sugar tomli==2.0.1 # via # coverage # pytest -urllib3==1.26.15 +urllib3==2.0.3 # via # -c requirements/app.txt # requests -werkzeug==2.0.3 +werkzeug==2.1.0 # via # -c requirements/app.txt # flask # pytest-flask -zipp==3.15.0 - # via - # -c requirements/app.txt - # importlib-metadata