diff --git a/.gitattributes b/.gitattributes index cbac638a7..51769a21f 100644 --- a/.gitattributes +++ b/.gitattributes @@ -3,3 +3,4 @@ src/test/resources/edu/ie3/datamodel/io/source/influxdb/_weather/cosmo/weather.txt eol=lf src/test/resources/edu/ie3/datamodel/io/source/influxdb/_weather/icon/weather.txt eol=lf +gradlew eol=lf diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 3ba92939b..522a1ae52 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -13,10 +13,6 @@ updates: - sebastian-peter - danielfeismann - jo-bao - ignore: - - dependency-name: org.spockframework:spock-core - versions: - - 2.3-groovy-4.0 - package-ecosystem: pip directory: "/docs/readthedocs" diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 90e935b33..e169b21e7 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,9 +7,9 @@ version: 2 # Set the version of Python and other tools you might need build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: - python: "3.9" + python: "3.11" # Configure python python: @@ -18,4 +18,5 @@ python: # Build documentation in the docs/ directory with Sphinx sphinx: - configuration: docs/readthedocs/conf.py \ No newline at end of file + configuration: docs/readthedocs/conf.py + fail_on_warning: true diff --git a/CHANGELOG.md b/CHANGELOG.md index 046e514b6..168dafc71 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,27 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased/Snapshot] +## [4.0.0] - 2023-08-01 + +### Added +- Copy methods for container classes [#726](https://github.com/ie3-institute/PowerSystemDataModel/issues/726) +- Allow hierarchic grid structure for JointGridContainer [#768](https://github.com/ie3-institute/PowerSystemDataModel/issues/768) +- Adding SQL id coordinate sources (``IdCoordinateSource``) [#689](https://github.com/ie3-institute/PowerSystemDataModel/issues/689) +- Added some standard asset types to documentation 
[#642](https://github.com/ie3-institute/PowerSystemDataModel/issues/642) + +### Fixed +- Fixed wrong rated power unit hint [#804](https://github.com/ie3-institute/PowerSystemDataModel/issues/804) +- Fixed wrong hash code generation of ConnectorResult [#817](https://github.com/ie3-institute/PowerSystemDataModel/issues/817) + +### Changed +- Removing deprecated classes and methods [#540](https://github.com/ie3-institute/PowerSystemDataModel/issues/540) +- Refactor CSV data sources [#716](https://github.com/ie3-institute/PowerSystemDataModel/issues/716) +- Deleted parameter initFiles, set parameter append to false by default [#791](https://github.com/ie3-institute/PowerSystemDataModel/issues/791) +- Use nio paths instead of strings for file path [#723](https://github.com/ie3-institute/PowerSystemDataModel/issues/723) +- Data source will throw an exceptions instead of returning an empty optionals [#707](https://github.com/ie3-institute/PowerSystemDataModel/issues/707) +- Improving `ValidationUtils` [#758](https://github.com/ie3-institute/PowerSystemDataModel/issues/758) + + ## [3.0.0] - 2023-02-16 ### Added @@ -25,26 +46,26 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - Reduced code smells [#492](https://github.com/ie3-institute/PowerSystemDataModel/issues/492) - - Protected constructors for abstract classes - - Use pattern matching - - Remove unused imports - - Use enhanced switch statements - - Replace lambdas with method references - - Use `Stream#toList` - - Adapt visibility for JUnit 5 + - Protected constructors for abstract classes + - Use pattern matching + - Remove unused imports + - Use enhanced switch statements + - Replace lambdas with method references + - Use `Stream#toList` + - Adapt visibility for JUnit 5 - More code smell fixing [#633](https://github.com/ie3-institute/PowerSystemDataModel/issues/633) - - Use `List#of` - - Use direct assignment with switch/case structures - - Turn some classes into records 
- - Making abstract classes' constructor protected - - Improving some RegExs - - Replacing `filter(Optional::isPresent).map(Optional::get)` on streams with `flatMap(Optional::stream)` - - instanceof variable declarations - - Removing unnecessary parentheses - - Miscellaneous code smells + - Use `List#of` + - Use direct assignment with switch/case structures + - Turn some classes into records + - Making abstract classes' constructor protected + - Improving some RegExs + - Replacing `filter(Optional::isPresent).map(Optional::get)` on streams with `flatMap(Optional::stream)` + - instanceof variable declarations + - Removing unnecessary parentheses + - Miscellaneous code smells - Fix JavaDoc creation - - Create JavaDoc with java 17 instead of java 8 - - Let JavDoc pass, if there are warnings **ATTENTION:** Should be removed, when JavaDoc is fixed! (cf. Issue [#494](https://github.com/ie3-institute/PowerSystemDataModel/issues/494)) + - Create JavaDoc with java 17 instead of java 8 + - Let JavDoc pass, if there are warnings **ATTENTION:** Should be removed, when JavaDoc is fixed! (cf. 
Issue [#494](https://github.com/ie3-institute/PowerSystemDataModel/issues/494)) - `BufferedCsvWriter` writes columns in the order, that the headline elements are defined [#434](https://github.com/ie3-institute/PowerSystemDataModel/issues/393) - Cleaned up `IndividualTimeSeriesMetaInformation`-related methods in `CsvFileConnector` [#544](https://github.com/ie3-institute/PowerSystemDataModel/issues/544) - Fixed spotlessApply handling for `.groovy` files [#637](https://github.com/ie3-institute/PowerSystemDataModel/issues/637) @@ -65,18 +86,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `edu.ie3.datamodel.io.connectors.CsvFileConnector.CsvIndividualTimeSeriesMetaInformation` - and related methods - BREAKING: Comprehensive harmonization around weather sources [#267](https://github.com/ie3-institute/PowerSystemDataModel/issues/267) - - Adapted the expected column scheme - - General weather model - - `coordinate` to `coordinateid` - - DWD COSMO model - - `diffuseirradiation` to `diffuseirradiance` - - `directirradiation` to `directirradiance` - - ICON model: - - `"datum"` to `"time"` - - Force user to provide time stamp pattern to `CouchbaseWeatherSource` to ensure harmonized querying + - Adapted the expected column scheme + - General weather model + - `coordinate` to `coordinateid` + - DWD COSMO model + - `diffuseirradiation` to `diffuseirradiance` + - `directirradiation` to `directirradiance` + - ICON model: + - `"datum"` to `"time"` + - Force user to provide time stamp pattern to `CouchbaseWeatherSource` to ensure harmonized querying - BREAKING: Updating PowerSystemUtils dependency to 2.0-SNAPSHOT [#595](https://github.com/ie3-institute/PowerSystemDataModel/issues/595) - BREAKING: Generified the `LoadInput` attribute `standardLoadProfile` to `loadProfile` as it should also address the newly added `TemperatureDependantLoadProfile`s [#601](https://github.com/ie3-institute/PowerSystemDataModel/issues/601) - Adapted to new double 
converters in PSU [#705](https://github.com/ie3-institute/PowerSystemDataModel/issues/705) +- Setting fixed groovy version and updating groovy [#788](https://github.com/ie3-institute/PowerSystemDataModel/issues/788) ## [2.1.0] - 2022-01-05 @@ -92,6 +114,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Writers used to write time series are closed right away - Changed class name in FlexOptionsResult.toString [#693](https://github.com/ie3-institute/PowerSystemDataModel/issues/693) - Deleted parameter decimalPlaces and changed naming of serialization method [#710](https://github.com/ie3-institute/PowerSystemDataModel/issues/710) +- Changed switch result documentation according to the implementation [#757](https://github.com/ie3-institute/PowerSystemDataModel/issues/757) +- Added documentation for EmResult and FlexOptionResult [#656](https://github.com/ie3-institute/PowerSystemDataModel/issues/656) ## [2.0.1] - 2021-07-08 @@ -195,7 +219,8 @@ coordinates or multiple exactly equal coordinates possible - CsvDataSource now stops trying to get an operator for empty operator uuid field in entities - CsvDataSource now parsing multiple geoJson strings correctly -[Unreleased/Snapshot]: https://github.com/ie3-institute/powersystemdatamodel/compare/3.0.0...HEAD +[Unreleased/Snapshot]: https://github.com/ie3-institute/powersystemdatamodel/compare/4.0.0...HEAD +[4.0.0]: https://github.com/ie3-institute/powersystemdatamodel/compare/3.0.0...4.0.0 [3.0.0]: https://github.com/ie3-institute/powersystemdatamodel/compare/2.1.0...3.0.0 [2.1.0]: https://github.com/ie3-institute/powersystemdatamodel/compare/2.0.1...2.1.0 [2.0.1]: https://github.com/ie3-institute/powersystemdatamodel/compare/2.0.0...2.0.1 diff --git a/build.gradle b/build.gradle index efe690fb9..6a804d47d 100644 --- a/build.gradle +++ b/build.gradle @@ -4,19 +4,21 @@ plugins { id 'maven-publish' id 'signing' id 'pmd' // code check, working on source code - id 'com.diffplug.spotless' 
version '6.15.0'//code format - id 'com.github.spotbugs' version '5.0.13' // code check, working on byte code - id 'de.undercouch.download' version '5.3.1' + id 'com.diffplug.spotless' version '6.20.0'//code format + id 'com.github.spotbugs' version '5.1.0' // code check, working on byte code + id 'de.undercouch.download' version '5.4.0' id 'kr.motd.sphinx' version '2.10.1' // documentation generation id 'jacoco' // java code coverage plugin - id "org.sonarqube" version "3.5.0.2730" // sonarqube + id "org.sonarqube" version "4.3.0.3225" // sonarqube id 'net.thauvin.erik.gradle.semver' version '1.0.4' // semantic versioning } ext { //version (changing these should be considered thoroughly!) javaVersion = JavaVersion.VERSION_17 - testcontainersVersion = '1.17.6' + groovyVersion = "4.0" + groovyBinaryVersion = "4.0.13" + testcontainersVersion = '1.18.3' scriptsLocation = 'gradle' + File.separator + 'scripts' + File.separator //location of script plugins } @@ -43,7 +45,6 @@ repositories { // sonatype snapshot repo maven { url 'https://s01.oss.sonatype.org/content/repositories/snapshots' } - } dependencies { @@ -51,7 +52,7 @@ dependencies { // ie³ power system utils implementation 'com.github.ie3-institute:PowerSystemUtils:2.0' - implementation 'tech.units:indriya:2.1.4' + implementation 'tech.units:indriya:2.2' // JTS Topology Suite for GeoPositions, License: EPL 1.0 / EDL 1.0 implementation ('org.locationtech.jts:jts-core:1.19.0'){ @@ -61,13 +62,15 @@ dependencies { implementation 'org.locationtech.jts.io:jts-io-common:1.19.0' // Graphs - implementation 'org.jgrapht:jgrapht-core:1.5.1' + implementation 'org.jgrapht:jgrapht-core:1.5.2' // testing - testImplementation 'org.junit.jupiter:junit-jupiter:5.9.2' - testImplementation 'org.spockframework:spock-core:2.3-groovy-3.0' + testImplementation "org.apache.groovy:groovy:$groovyBinaryVersion" + + testImplementation 'org.junit.jupiter:junit-jupiter:5.10.0' + testImplementation 
"org.spockframework:spock-core:2.3-groovy-$groovyVersion" testImplementation 'org.objenesis:objenesis:3.3' // Mock creation with constructor parameters - testImplementation 'net.bytebuddy:byte-buddy:1.13.0' // Mocks of classes + testImplementation 'net.bytebuddy:byte-buddy:1.14.5' // Mocks of classes // testcontainers (docker framework for testing) testImplementation "org.testcontainers:testcontainers:$testcontainersVersion" @@ -77,20 +80,19 @@ dependencies { testImplementation "org.testcontainers:couchbase:$testcontainersVersion" // logging - implementation platform('org.apache.logging.log4j:log4j-bom:2.19.0') + implementation platform('org.apache.logging.log4j:log4j-bom:2.20.0') implementation 'org.apache.logging.log4j:log4j-api' // log4j implementation 'org.apache.logging.log4j:log4j-core' // log4j implementation 'org.apache.logging.log4j:log4j-slf4j-impl' // log4j -> slf4j // Databases implementation 'org.influxdb:influxdb-java:2.23' - implementation 'com.couchbase.client:java-client:3.4.3' - runtimeOnly 'org.postgresql:postgresql:42.5.3' // postgresql jdbc driver required during runtime - - implementation 'commons-io:commons-io:2.11.0' // I/O functionalities - implementation 'org.apache.commons:commons-compress:1.22' // I/O functionalities - implementation 'org.apache.commons:commons-lang3:3.12.0' + implementation 'com.couchbase.client:java-client:3.4.8' + runtimeOnly 'org.postgresql:postgresql:42.6.0' // postgresql jdbc driver required during runtime + implementation 'commons-io:commons-io:2.13.0' // I/O functionalities + implementation 'org.apache.commons:commons-compress:1.23.0' // I/O functionalities + implementation 'org.apache.commons:commons-lang3:3.13.0' } tasks.withType(JavaCompile) { diff --git a/docs/readthedocs/_static/figures/uml/DataSourceClassDiagram.png b/docs/readthedocs/_static/figures/uml/DataSourceClassDiagram.png index c1a91f912..8ea07c880 100644 Binary files a/docs/readthedocs/_static/figures/uml/DataSourceClassDiagram.png and 
b/docs/readthedocs/_static/figures/uml/DataSourceClassDiagram.png differ diff --git a/docs/readthedocs/_static/figures/uml/EntitySourceClassDiagram.png b/docs/readthedocs/_static/figures/uml/EntitySourceClassDiagram.png new file mode 100644 index 000000000..19a94ff8e Binary files /dev/null and b/docs/readthedocs/_static/figures/uml/EntitySourceClassDiagram.png differ diff --git a/docs/readthedocs/_static/figures/uml/FunctionalDataSourceClassDiagram.png b/docs/readthedocs/_static/figures/uml/FunctionalDataSourceClassDiagram.png new file mode 100644 index 000000000..8dca730c3 Binary files /dev/null and b/docs/readthedocs/_static/figures/uml/FunctionalDataSourceClassDiagram.png differ diff --git a/docs/readthedocs/_static/figures/uml/TimeSeriesSourceClassDiagram.png b/docs/readthedocs/_static/figures/uml/TimeSeriesSourceClassDiagram.png new file mode 100644 index 000000000..b111fb370 Binary files /dev/null and b/docs/readthedocs/_static/figures/uml/TimeSeriesSourceClassDiagram.png differ diff --git a/docs/readthedocs/_static/figures/uml/WeatherCoordinateSourceClassDiagram.png b/docs/readthedocs/_static/figures/uml/WeatherCoordinateSourceClassDiagram.png new file mode 100644 index 000000000..7f657673a Binary files /dev/null and b/docs/readthedocs/_static/figures/uml/WeatherCoordinateSourceClassDiagram.png differ diff --git a/docs/readthedocs/conf.py b/docs/readthedocs/conf.py index 08685db9d..cdf07d375 100644 --- a/docs/readthedocs/conf.py +++ b/docs/readthedocs/conf.py @@ -18,12 +18,12 @@ # -- Project information ----------------------------------------------------- project = 'PowerSystemDataModel' -copyright = u'2020. TU Dortmund University, Institute of Energy Systems, Energy Efficiency and Energy Economics, Research group Distribution grid planning and operation ' -author = 'Johannes Hiry, Debopama Sen Sarma, Chris Kittl' +copyright = u'2023. 
TU Dortmund University, Institute of Energy Systems, Energy Efficiency and Energy Economics, Research group Distribution grid planning and operation ' +author = 'Institute of Energy Systems, Energy Efficiency and Energy Economics' # The full version, including alpha/beta/rc tags -version = '1.0' -release = '1.0.1-SNAPSHOT' +version = '3.0' +release = '3.0.0' pygments_style = 'tango' add_function_parentheses = True @@ -35,12 +35,12 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'recommonmark', - 'sphinx.ext.autosectionlabel' + 'sphinx.ext.intersphinx', + 'myst_parser' ] -# Prefix all autogenerated labels wit the document to get unique labels (e.g. `index:Hello`) -autosectionlabel_prefix_document = True +myst_enable_extensions = ["dollarmath", "amsmath"] +myst_heading_anchors = 4 # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -49,7 +49,8 @@ # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'venv'] - +exclude_trees = ['.build'] +source_suffix = ['.rst', '.md'] source_encoding = 'utf-8-sig' # -- Options for HTML output ------------------------------------------------- diff --git a/docs/readthedocs/models/ValidationUtils.rst b/docs/readthedocs/io/ValidationUtils.md similarity index 66% rename from docs/readthedocs/models/ValidationUtils.rst rename to docs/readthedocs/io/ValidationUtils.md index e1c46094e..d0c38d9e6 100644 --- a/docs/readthedocs/models/ValidationUtils.rst +++ b/docs/readthedocs/io/ValidationUtils.md @@ -1,32 +1,26 @@ -**************** -Validation Utils -**************** +# Validation Utils This page gives an overview about the ValidationUtils in the *PowerSystemDataModel*. -What are the ValidationUtils? -============================= +## What are the ValidationUtils? 
The methods in ValidationUtils and subclasses can be used to check that objects are valid, meaning their parameters have valid values and they are correctly connected. -What is checked? -================ +## What is checked? - The check methods include checks that assigned values are valid, e.g. lines are not allowed to have negative lengths or the rated power factor of any unit must be between 0 and 1. - Furthermore, several connections are checked, e.g. that lines only connect nodes of the same voltage level or that the voltage levels indicated for the transformer sides match the voltage levels of the nodes they are connected to. -How does it work? -================= -- The method :code:`ValidationUtils.check(Object)` is the only method that should be called by the user. +## How does it work? +- The method `ValidationUtils.check(Object)` is the only method that should be called by the user. - This check method identifies the object class and forwards it to a specific check method for the given object - The overall structure of the ValidationUtils methods follows a cascading scheme, orientated along the class tree -- Example: A :code:`LineInput lineInput` should be checked - 1. :code:`ValidationUtils.check(lineInput)` is called - 2. :code:`ValidationUtils.check(lineInput)` identifies the class of the object as :code:`AssetInput` and calls :code:`ValidationUtils.checkAsset(lineInput)` - 3. :code:`ValidationUtils.checkAsset(lineInput)`, if applicable, checks those parameters that all :code:`AssetInput` have in common (e.g. operation time) and further identifies the object, more specifically, as a :code:`ConnectorInput` and calls :code:`ConnectorValidationUtils.check(lineInput)` - 4. :code:`ConnectorValidationUtils.check(lineInput)`, if applicable, checks those parameters that all :code:`ConnectorInput` have in common and further identifies the object, more specifically, as a :code:`LineInput` and calls :code:`ConnectorValidationUtils.checkLine(lineInput)` - 5. 
:code:`ConnectorValidationUtils.checkLine(lineInput)` checks all specific parameters of a :code:`LineInput` +- Example: A `LineInput lineInput` should be checked + 1. `ValidationUtils.check(lineInput)` is called + 2. `ValidationUtils.check(lineInput)` identifies the class of the object as `AssetInput` and calls `ValidationUtils.checkAsset(lineInput)` + 3. `ValidationUtils.checkAsset(lineInput)`, if applicable, checks those parameters that all `AssetInput` have in common (e.g. operation time) and further identifies the object, more specifically, as a `ConnectorInput` and calls `ConnectorValidationUtils.check(lineInput)` + 4. `ConnectorValidationUtils.check(lineInput)`, if applicable, checks those parameters that all `ConnectorInput` have in common and further identifies the object, more specifically, as a `LineInput` and calls `ConnectorValidationUtils.checkLine(lineInput)` + 5. `ConnectorValidationUtils.checkLine(lineInput)` checks all specific parameters of a `LineInput` - ValidationUtils furthermore contains several utils methods used in the subclasses -Which objects are checked? -========================== +## Which objects are checked? The ValidationUtils include validation checks for... - NodeValidationUtils @@ -80,8 +74,7 @@ The ValidationUtils include validation checks for... - RawGridElements - SystemParticipants -What should be considered? -========================== +## What should be considered? - Due to many checks with if-conditions, the usage of the ValidationUtils for many objects might be runtime relevant. - The check for a GridContainer includes the interplay of the contained entities as well as the checks of all contained entities. - If new classes are introduced to the *PowerSystemDataModel*, make sure to follow the forwarding structure of the ValidationUtils methods when writing the check methods! 
diff --git a/docs/readthedocs/io/basiciousage.md b/docs/readthedocs/io/basiciousage.md new file mode 100644 index 000000000..64ce498df --- /dev/null +++ b/docs/readthedocs/io/basiciousage.md @@ -0,0 +1,40 @@ +# I/O + +The PowerSystemDataModel library additionally offers I/O-capabilities. +In the long run, it is our aim to provide many different source and sink technologies. +Therefore, the I/O-package is structured as highly modular. + +```{toctree} +--- +maxdepth: 2 +--- +csvfiles +sql +influxdb +ValidationUtils.md +``` + +## Data sink structure + +[![Class diagram of data sink classes](../_static/figures/uml/DataSinkClassDiagram.png)](../_static/figures/uml/DataSinkClassDiagram.png) + +## Data source structure + +The sources are divided in three blocks: +1. InputEntities and ResultEntities +2. TimeSeries related sources +3. Weather and Coordinate sources + +[![Class diagram of entity sources](../_static/figures/uml/EntitySourceClassDiagram.png)](../_static/figures/uml/EntitySourceClassDiagram.png) + +[![Class diagram of time series sources](../_static/figures/uml/TimeSeriesSourceClassDiagram.png)](../_static/figures/uml/TimeSeriesSourceClassDiagram.png) + +[![Class diagram of weather and coordinate sources](../_static/figures/uml/WeatherCoordinateSourceClassDiagram.png)](../_static/figures/uml/WeatherCoordinateSourceClassDiagram.png) + +The function to read the sources are implemented in the DataSource classes. 
+ +[![Class diagram of data sources](../_static/figures/uml/FunctionalDataSourceClassDiagram.png)](../_static/figures/uml/FunctionalDataSourceClassDiagram.png) + +## Data deployment + +[![Diagram of input data deployment](../_static/figures/uml/InputDataDeployment.png)](../_static/figures/uml/InputDataDeployment.png) diff --git a/docs/readthedocs/io/basiciousage.rst b/docs/readthedocs/io/basiciousage.rst deleted file mode 100644 index cf2e3a915..000000000 --- a/docs/readthedocs/io/basiciousage.rst +++ /dev/null @@ -1,29 +0,0 @@ -### -I/O -### -The PowerSystemDataModel library additionally offers I/O-capabilities. -In the long run, it is our aim to provide many different source and sink technologies. -Therefore, the I/O-package is structured as highly modular. - -.. toctree:: - :maxdepth: 2 - - influxdb - csvfiles - - - -Data sink structure -=================== -.. figure:: ../_static/figures/uml/DataSinkClassDiagram.png - :align: center - :width: 650 - :alt: Class diagram of data sink classes - - -Data deployment -=============== -.. figure:: ../_static/figures/uml/InputDataDeployment.png - :align: center - :width: 650 - :alt: Diagram of input data deployment diff --git a/docs/readthedocs/io/csvfiles.md b/docs/readthedocs/io/csvfiles.md new file mode 100644 index 000000000..e022918ad --- /dev/null +++ b/docs/readthedocs/io/csvfiles.md @@ -0,0 +1,212 @@ +# CSV files + +## Naming of files + +A naming strategy provides a mapping between model classes and the human readable names of those entities to be used +within e.g. the data sinks, in which the serialized representation of several objects of this class can be found. +Currently we offer two different, pre-defined naming strategies, which you might extend to fit your needs: + +1. **EntityPersistenceNamingStrategy**: + A basic naming strategy that is able to add prefix and suffix to the names of the entities. A flat folder structure + is considered. 
For more details see [Default naming strategy](#default-naming-strategy). +2. **HierarchicFileNamingStrategy**: + An extended version of the EntityPersistenceNamingStrategy. Additionally, the [Default directory hierarchy](#default-directory-hierarchy) is taken + into account. Please note, that this directory hierarchy is only meant to be used in conjunction with input models. + +However, you can control the behaviour of serialization and de-serialization of models by injecting the desired naming +strategy you like into `CsvDataSource` and `CsvFileSink`. + +## Default naming strategy + +There is a default mapping from model class to naming of entities in the case you would like to use csv files for +(de-)serialization of models. +You may extend / alter the naming with pre- or suffix by calling `new EntityPersistenceNamingStrategy("prefix","suffix")`. + +### Input + +| Model | File Name | +|:----------------------------------|:------------------------------------------------------------------------------------------| +| operator | *prefix_* operator_input *_suffix* | +| node | *prefix_* node_input *_suffix* | +| line | *prefix_* line_input *_suffix*
*prefix_* line_type_input *_suffix* | +| switch | *prefix_* switch_input *_suffix* | +| two winding transformer | *prefix_* transformer2w_input *_suffix*
*prefix_* transformer2w_type_input *_suffix* | +| three winding transformer | *prefix_* transformer3w_input *_suffix*
*prefix_* transformer3w_type_input *_suffix* | +| measurement unit | *prefix_* measurement_unit_input *_suffix* | +| biomass plant | *prefix_* bm_input *_suffix*
*prefix_* bm_type_input *_suffix* | +| combined heat and power plant | *prefix_* chp_input *_suffix*
*prefix_* chp_type_input *_suffix* | +| electric vehicle | *prefix_* ev_input *_suffix*
*prefix_* ev_type_input *_suffix* | +| electric vehicle charging station | *prefix_* evcs_input *_suffix* | +| fixed feed in facility | *prefix_* fixed_feed_in_input *_suffix* | +| heat pump | *prefix_* hp_input *_suffix*
*prefix_* hp_type_input *_suffix* | +| load | *prefix_* load_input *_suffix* | +| photovoltaic power plant | *prefix_* pc_input *_suffix* | +| electrical energy storage | *prefix_* storage_input *_suffix*
*prefix_* storage_type_input *_suffix* | +| wind energy converter | *prefix_* wec_input *_suffix*
*prefix_* wec_type_input *_suffix* | +| schematic node graphic | *prefix_* node_graphic_input *_suffix* | +| schematic line graphic | *prefix_* line_graphic_input *_suffix* | + + +### Time Series + +| Model | File Name | +|:-----------------------|:------------------------------------------| +| individual time series | *prefix_* its *_columnScheme_UUID_suffix* | +| load profile input | *prefix_* rts *_profileKey_UUID_suffix* | + + +Let's spend a few more words on the individual time series: +Those files are meant to carry different types of content - one might give information about wholesale market prices, +the other is a record of power values provided by a real system. +To be able to understand, what's inside of the file, the *columnScheme* part of the file name gives insight of it's +content. +The following keys are supported until now: + +| Key | Information and supported head line | +|:--------|:---------------------------------------------------------------------------------------------------------------------------------------------------------| +| c | An energy price (e.g. in €/MWh; c stands for charge).
Permissible head line: ``uuid,time,price`` | +| p | Active power
Permissible head line: ``uuid,time,p`` | +| pq | Active and reactive power
Permissible head line: ``uuid,time,p,q`` | +| h | Heat power demand
Permissible head line: ``uuid,time,h`` | +| ph | Active and heat power
Permissible head line: ``uuid,time,p,h`` | +| pqh | Active, reactive and heat power
Permissible head line: ``uuid,time,p,q,h`` | +| weather | Weather information
Permissible head line: ``uuid,time,coordinate,direct_irradiation,diffuse_irradiation,temperature,wind_velocity,wind_direction`` | + + +As the ``uuid`` and ``time`` field are mandatory, they are not mentioned explicitly, here. + +### Results + + +| Model | File Name | +|:----------------------------------|:--------------------------------------------| +| node | *prefix_* node_res *_suffix* | +| line | *prefix_* line_res *_suffix* | +| switch | *prefix_* switch_res *_suffix* | +| two winding transformer | *prefix_* transformer2w_res *_suffix* | +| three winding transformer | *prefix_* transformer3w_res *_suffix* | +| biomass plant | *prefix_* bm_res *_suffix* | +| combined heat and power plant | *prefix_* chp_res *_suffix* | +| electric vehicle | *prefix_* ev_res *_suffix* | +| electric vehicle charging station | *prefix_* evcs_res\*_suffix* | +| fixed feed in | *prefix_* fixed_feed_in_res *_suffix* | +| heat pump | *prefix_* hp_res *_suffix* | +| load | *prefix_* load_res *_suffix* | +| photovoltaic power plant | *prefix_* pv_res *_suffix* | +| storage | *prefix_* storage_res *_suffix* | +| wind energy converter | *prefix_* wec_res *_suffix* | +| thermal house model | *prefix_* thermal_house_res *_suffix* | +| cylindrical thermal storage | *prefix_* cylindrical_storage_res *_suffix* | + + +## Default directory hierarchy + +Although there is no fixed structure of files mandatory, there is something, we consider to be a good idea of +structuring things. +You may either ship your csv files directly in this structure or compress everything in a .tar.gz file. +However, following this form, we are able to provide you some helpful tools in obtaining and saving your models a bit +easier. 
+ +![Default directory hierarchy for input classes](../_static/figures/uml/DefaultInputDirectoryHierarchy.png) +Default directory hierarchy for input classes + +![Default directory hierarchy for result classes](../_static/figures/uml/DefaultResultDirectoryHierarchy.png) +Default directory hierarchy for result classes + +The italic parts are optional and the others are mandatory. +As you see, this still is a pretty flexible approach, as you only need to provide, what you really need. +However, note that this hierarchy is only meant to be used in conjunction with input models, yet. + +The class `DefaultInputHierarchy` offers some helpful methods to validate and create a default input file +hierarchy. + +## De-serialization (loading models) + +Having an instance of [Grid Container](/models/input/grid/gridcontainer) is most of the time the target whenever you load your +grid. It consists of the three main blocks: + +1. [Raw grid elements](/models/input/grid/gridcontainer) +2. [System participants](/models/input/grid/gridcontainer) +3. [Graphics](/models/input/grid/gridcontainer) + +Those blocks are also reflected in the structure of data source interface definitions. +There is one source for each of the containers, respectively. + +As a full data set has references among the models (e.g. a line model points to its' nodes it connects), there is a +hierarchical structure, in which models have to be loaded. +Therefore, the different sources have also references among themselves. 
+An application example to load an *exampleGrid* from csv files located in `./exampleGrid` could look like this: + + +``` java + /* Parameterization */ + String gridName = "exampleGrid"; + String csvSep = ","; + String folderPath = "./exampleGrid"; + EntityPersistenceNamingStrategy namingStrategy = new EntityPersistenceNamingStrategy(); // Default naming strategy + + /* Instantiating sources */ + TypeSource typeSource = new CsvTypeSource(csvSep, folderPath, namingStrategy); + RawGridSource rawGridSource = new CsvRawGridSource(csvSep, folderPath, namingStrategy, typeSource); + ThermalSource thermalSource = new CsvThermalSource(csvSep, folderPath, namingStrategy, typeSource); + SystemParticipantSource systemParticipantSource = new CsvSystemParticipantSource( + csvSep, + folderPath, + namingStrategy, + typeSource, + thermalSource, + rawGridSource + ); + GraphicSource graphicsSource = new CsvGraphicSource( + csvSep, + folderPath, + namingStrategy, + typeSource, + rawGridSource + ); + + /* Loading models */ + RawGridElements rawGridElements = rawGridSource.getGridData().orElseThrow( + () -> new SourceException("Error during reading of raw grid data.")); + SystemParticipants systemParticipants = systemParticipantSource.getSystemParticipants().orElseThrow( + () -> new SourceException("Error during reading of system participant data.")); + GraphicElements graphicElements = graphicsSource.getGraphicElements().orElseThrow( + () -> new SourceException("Error during reading of graphic elements.")); + JointGridContainer fullGrid = new JointGridContainer( + gridName, + rawGridElements, + systemParticipants, + graphicElements + ); +``` + +As observable from the code, it doesn't play a role, where the different parts come from. +It is also a valid solution, to receive types from file, but participants and raw grid elements from a data base. +Only prerequisite is an implementation of the different interfaces for the desired data source. 
+ +## Serialization (writing models) + +Serializing models is a bit easier: + +``` java + /* Parameterization */ + String csvSep = ","; + String folderPath = "./exampleGrid"; + EntityPersistenceNamingStrategy namingStrategy = new EntityPersistenceNamingStrategy(); + boolean initEmptyFiles = false; + + /* Instantiating the sink */ + CsvFileSink sink = new CsvFileSink(folderPath, namingStrategy, initEmptyFiles, csvSep); + sink.persistJointGridContainer(grid); +``` + +The sink takes a collection of models suitable for serialization and handles the rest (e.g. unboxing of nested models) +on its own. +But caveat: As the (csv) writers are implemented in a concurrent, non-blocking way, duplicates of nested models could +occur. + +## Compression and extraction of files + +We consider either regular directories or compressed [tarball archives](https://en.wikipedia.org/wiki/Tar_(computing)) +(`*.tar.gz`) as source of input files. +The class `TarballUtils` offers some helpful functions to compress or extract input data files for easier shipping. \ No newline at end of file diff --git a/docs/readthedocs/io/csvfiles.rst b/docs/readthedocs/io/csvfiles.rst deleted file mode 100644 index 35ab4b8c8..000000000 --- a/docs/readthedocs/io/csvfiles.rst +++ /dev/null @@ -1,292 +0,0 @@ -********* -csv files -********* - -Naming of files -=============== -A naming strategy provides a mapping between model classes and the human readable names of those entities to be used -within e.g. the data sinks, in which the serialized representation of several objects of this class can be found. -Currently we offer two different, pre-defined naming strategies, which you might extend to fit your needs: - -1. **EntityPersistenceNamingStrategy**: - A basic naming strategy that is able to add prefix and suffix to the names of the entities. A flat folder structure - is considered. For more details see `Default naming strategy`_. -2. 
**HierarchicFileNamingStrategy**: - An extended version of the EntityPersistenceNamingStrategy. Additionally, the `Default directory hierarchy`_ is taken - into account. Please note, that this directory hierarchy is only meant to be used in conjunction with input models. - -However, you can control the behaviour of serialization and de-serialization of models by injecting the desired naming -strategy you like into :code:`CsvDataSource` and :code:`CsvFileSink`. - -Default naming strategy -======================= -There is a default mapping from model class to naming of entities in the case you would like to use csv files for -(de-)serialization of models. -You may extend / alter the naming with pre- or suffix by calling :code:`new EntityPersistenceNamingStrategy("prefix","suffix")`. - -Input ------ - -+--------------------------------------------------------+--------------------------------------------------+ -| Model | File Name | -+========================================================+==================================================+ -| :ref:`operator` | *prefix_*\ operator_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`node` | *prefix_*\ node_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`line` | | *prefix_*\ line_input\ *_suffix* | -| | | *prefix_*\ line_type_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`switch` | *prefix_*\ switch_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`two winding transformer` | | *prefix_*\ transformer2w_input\ *_suffix* | -| | | *prefix_*\ transformer2w_type_input\ *_suffix* | 
-+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`three winding transformer` | | *prefix_*\ transformer3w_input\ *_suffix* | -| | | *prefix_*\ transformer3w_type_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`measurement unit` | *prefix_*\ measurement_unit_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`biomass plant` | | *prefix_*\ bm_input\ *_suffix* | -| | | *prefix_*\ bm_type_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`combined heat and power plant` | | *prefix_*\ chp_input\ *_suffix* | -| | | *prefix_*\ chp_type_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`electric vehicle` | | *prefix_*\ ev_input\ *_suffix* | -| | | *prefix_*\ ev_type_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`electric vehicle charging station` | *prefix_*\ evcs_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`fixed feed in facility` | *prefix_*\ fixed_feed_in_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`heat pump` | | *prefix_*\ hp_input\ *_suffix* | -| | | *prefix_*\ hp_type_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`load` | *prefix_*\ load_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| 
:ref:`photovoltaic power plant` | *prefix_*\ pc_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`electrical energy storage` | | *prefix_*\ storage_input\ *_suffix* | -| | | *prefix_*\ storage_type_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`wind energy converter` | | *prefix_*\ wec_input\ *_suffix* | -| | | *prefix_*\ wec_type_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`schematic node graphic` | *prefix_*\ node_graphic_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ -| :ref:`schematic line graphic` | *prefix_*\ line_graphic_input\ *_suffix* | -+--------------------------------------------------------+--------------------------------------------------+ - -Time Series ------------ - -+-------------------------------------------------------+---------------------------------------------+ -| Model | File Name | -+=======================================================+=============================================+ -| :ref:`individual time series` | *prefix_*\ its\ *_columnScheme_UUID_suffix* | -+-------------------------------------------------------+---------------------------------------------+ -| :ref:`load profile input` | *prefix_*\ rts\ *_profileKey_UUID_suffix* | -+-------------------------------------------------------+---------------------------------------------+ - -Let's spend a few more words on the individual time series: -Those files are meant to carry different types of content - one might give information about wholesale market prices, -the other is a record of power values provided by a real system. 
-To be able to understand, what's inside of the file, the *columnScheme* part of the file name gives insight of it's -content. -The following keys are supported until now: - -+---------+----------------------------------------------------------------------------------------------------------------+ -| Key | Information and supported head line | -+=========+================================================================================================================+ -| c | | An energy price (e.g. in €/MWh; c stands for charge). | -| | | Permissible head line: ``uuid,time,price`` | -+---------+----------------------------------------------------------------------------------------------------------------+ -| p | | Active power | -| | | Permissible head line: ``uuid,time,p`` | -+---------+----------------------------------------------------------------------------------------------------------------+ -| pq | | Active and reactive power | -| | | Permissible head line: ``uuid,time,p,q`` | -+---------+----------------------------------------------------------------------------------------------------------------+ -| h | | Heat power demand | -| | | Permissible head line: ``uuid,time,h`` | -+---------+----------------------------------------------------------------------------------------------------------------+ -| ph | | Active and heat power | -| | | Permissible head line: ``uuid,time,p,h`` | -+---------+----------------------------------------------------------------------------------------------------------------+ -| pqh | | Active, reactive and heat power | -| | | Permissible head line: ``uuid,time,p,q,h`` | -+---------+----------------------------------------------------------------------------------------------------------------+ -| weather | | Weather information | -| | | Permissible head line: | -| | | ``uuid,time,coordinate,direct_irradiation,diffuse_irradiation,temperature,wind_velocity,wind_direction`` | 
-+---------+----------------------------------------------------------------------------------------------------------------+ - -As the ``uuid`` and ``time`` field are mandatory, they are not mentioned explicitly, here. - -Results -------- - -+---------------------------------------------------------------+-----------------------------------------------+ -| Model | File Name | -+===============================================================+===============================================+ -| :ref:`node` | *prefix_*\ node_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`line` | *prefix_*\ line_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`switch` | *prefix_*\ switch_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`two winding transformer` | *prefix_*\ transformer2w_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`three winding transformer` | *prefix_*\ transformer3w_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`biomass plant` | *prefix_*\ bm_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`combined heat and power plant` | *prefix_*\ chp_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`electric vehicle` | *prefix_*\ ev_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`electric vehicle charging station` | *prefix_*\ evcs_res\ *_suffix* | 
-+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`fixed feed in` | *prefix_*\ fixed_feed_in_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`heat pump` | *prefix_*\ hp_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`load` | *prefix_*\ load_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`photovoltaic power plant` | *prefix_*\ pv_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`storage` | *prefix_*\ storage_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`wind energy converter` | *prefix_*\ wec_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`thermal house model` | *prefix_*\ thermal_house_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ -| :ref:`cylindrical thermal storage` | *prefix_*\ cylindrical_storage_res\ *_suffix* | -+---------------------------------------------------------------+-----------------------------------------------+ - -Default directory hierarchy -=========================== -Although there is no fixed structure of files mandatory, there is something, we consider to be a good idea of -structuring things. -You may either ship your csv files directly in this structure or compress everything in a .tar.gz file. -However, following this form, we are able to provide you some helpful tools in obtaining and saving your models a bit -easier. - -.. 
figure:: ../_static/figures/uml/DefaultInputDirectoryHierarchy.png - :align: center - :alt: Default directory hierarchy for input classes - :width: 650 - - Default directory hierarchy for input classes - -.. figure:: ../_static/figures/uml/DefaultResultDirectoryHierarchy.png - :align: center - :alt: Default directory hierarchy for result classes - :width: 650 - - Default directory hierarchy for result classes - -The italic parts are optional and the others are mandatory. -As you see, this still is a pretty flexible approach, as you only need to provide, what you really need. -However, note that this hierarchy is only meant to be used in conjunction with input models, yet. - -The class :code:`DefaultInputHierarchy` offers some helpful methods to validate and create a default input file -hierarchy. - -De-serialization (loading models) -================================= -Having an instance of :ref:`Grid Container` is most of the time the target whenever you load your -grid. It consists of the three main blocks: - - 1. :ref:`Raw grid elements` - 2. :ref:`System participants` - 3. :ref:`Graphics` - -Those blocks are also reflected in the structure of data source interface definitions. -There is one source for each of the containers, respectively. - -.. figure:: ../_static/figures/uml/DataSourceClassDiagram.png - :align: center - :alt: Class diagram of data sources - :width: 650 - - Class diagram of data sources - -As a full data set has references among the models (e.g. a line model points to its' nodes it connects), there is a -hierarchical structure, in which models have to be loaded. -Therefore, the different sources have also references among themselves. -An application example to load an *exampleGrid* from csv files located in :code:`./exampleGrid` could look like this: - -.. 
code-block:: java - - /* Parameterization */ - String gridName = "exampleGrid"; - String csvSep = ","; - String folderPath = "./exampleGrid"; - EntityPersistenceNamingStrategy namingStrategy = new EntityPersistenceNamingStrategy(); // Default naming strategy - - /* Instantiating sources */ - TypeSource typeSource = new CsvTypeSource(csvSep, folderPath, namingStrategy); - RawGridSource rawGridSource = new CsvRawGridSource(csvSep, folderPath, namingStrategy, typeSource); - ThermalSource thermalSource = new CsvThermalSource(csvSep, folderPath, namingStrategy, typeSource); - SystemParticipantSource systemParticipantSource = new CsvSystemParticipantSource( - csvSep, - folderPath, - namingStrategy, - typeSource, - thermalSource, - rawGridSource - ); - GraphicSource graphicsSource = new CsvGraphicSource( - csvSep, - folderPath, - namingStrategy, - typeSource, - rawGridSource - ); - - /* Loading models */ - RawGridElements rawGridElements = rawGridSource.getGridData().orElseThrow( - () -> new SourceException("Error during reading of raw grid data.")); - SystemParticipants systemParticipants = systemParticipantSource.getSystemParticipants().orElseThrow( - () -> new SourceException("Error during reading of system participant data.")); - GraphicElements graphicElements = graphicsSource.getGraphicElements().orElseThrow( - () -> new SourceException("Error during reading of graphic elements.")); - JointGridContainer fullGrid = new JointGridContainer( - gridName, - rawGridElements, - systemParticipants, - graphicElements - ); - -As observable from the code, it doesn't play a role, where the different parts come from. -It is also a valid solution, to receive types from file, but participants and raw grid elements from a data base. -Only prerequisite is an implementation of the different interfaces for the desired data source. - -Serialization (writing models) -============================== -Serializing models is a bit easier: - -.. 
code-block:: java - - /* Parameterization */ - String csvSep = ","; - String folderPath = "./exampleGrid"; - EntityPersistenceNamingStrategy namingStrategy = new EntityPersistenceNamingStrategy(); - boolean initEmptyFiles = false; - - /* Instantiating the sink */ - CsvFileSink sink = new CsvFileSink(folderPath, namingStrategy, initEmptyFiles, csvSep); - sink.persistJointGridContainer(grid); - -The sink takes a collection of model suitable for serialization and handles the rest (e.g. unboxing of nested models) -on its own. -But caveat: As the (csv) writers are implemented in a concurrent, non-blocking way, duplicates of nested models could -occur. - -Compression and extraction of files -=================================== -We consider either regular directories or compressed `tarball archives `_ -(:code:`*.tar.gz`) as source of input files. -The class :code:`TarballUtils` offers some helpful functions to compress or extract input data files for easier shipping. \ No newline at end of file diff --git a/docs/readthedocs/io/sql.md b/docs/readthedocs/io/sql.md new file mode 100644 index 000000000..71edba1a3 --- /dev/null +++ b/docs/readthedocs/io/sql.md @@ -0,0 +1,8 @@ +# SQL + + +## Id Coordinate Source +The sql implementation of id coordinate source uses [PostgreSql](https://www.postgresql.org/) with the +addon [PostGis](https://postgis.net/). `PostGis` is used to improve the querying of geographical data. +The `Coordinate` attribute is stored as a [Geography](http://postgis.net/workshops/postgis-intro/geography.html) with +the type [Point](https://postgis.net/docs/ST_Point.html) and the default SRID 4326. 
diff --git a/docs/readthedocs/models/input/additionaldata/idcoordinatesource.md b/docs/readthedocs/models/input/additionaldata/idcoordinatesource.md new file mode 100644 index 000000000..665490477 --- /dev/null +++ b/docs/readthedocs/models/input/additionaldata/idcoordinatesource.md @@ -0,0 +1,92 @@ +# IdCoordinateSource +An id coordinate source provides a mapping between the ids of coordinates and their actual +latitude and longitude values. The id coordinate source itself is an interface that provides some +methods to get coordinates, ids of coordinates or the distance between a given coordinate and other +coordinates. + + +## Information + +| Attribute | Remarks | +|:-------------|:---------------------------------------------------------------| +| `Id` | An integer value for identifying the coordinate. | +| `Coordinate` | Geographical information presented as `Lat/long` of a `Point`. | + + + +## Known implementations: +The following implementations are currently known: + +- [Csv Id Coordinate Source](/io/csvfiles) +- [Sql Id Coordinate Source](/io/sql) + + +## Methods for coordinates: +The IdCoordinateSource contains methods for returning coordinates for given ids. + +``` java + Optional getCoordinate(int id) + Collection getCoordinates(int... ids) + Collection getAllCoordinates() +``` + +1. This method is used to return the coordinate of a given id. If no coordinate is found for +the given id, an empty optional is returned. + +2. This method is used to return the coordinates of a given set of ids. The method will only return +coordinates for existing ids. + +3. This method is used to return all available coordinates. + + +## Method for ids: +The IdCoordinateSource contains a method for retrieving the id of a given coordinate. + +``` java + Optional getId(Point coordinate) +``` + +This method is used to return the id of a given coordinate. If no id is found for the given +coordinate, an empty optional is returned. 
+ + +## Method for retrieving near coordinates: +The IdCoordinateSource also contains methods for retrieving coordinates/points that are near a given coordinate. +All implementations of these methods in this project will use the method ``restrictToBoundingBox`` for finding and +returning four corner points. + +``` java + List getNearestCoordinates(Point coordinate, int n) + List getClosestCoordinates(Point coordinate, int n, ComparableQuantity distance) + List calculateCoordinateDistances(Point coordinate, int n, Collection coordinates) +``` + +1. This method will return the nearest n coordinates for a given coordinate. The method works by having a default radius +that is increased with every iteration until n coordinates are found. + +2. This method will return the closest n coordinates for a given coordinate. Unlike the first method, this method has a +defined radius that won't be increased. Therefore, this method can only consider the coordinates inside the bounding box +around this radius. + +3. This method is used to calculate the distances to a set of given coordinates. After the calculation +the method will return the closest n coordinates. If the number of distances is less than n, this method will +return less than n coordinates. + + +## Finding and returning the closest corner coordinates: +In most cases we need four corner coordinates for our given coordinate. Therefore, the +IdCoordinateSource contains a method that will use the calculated distances to find the closest +corner coordinates for the given coordinate. + +``` java + List restrictToBoundingBox( + Point coordinate, + Collection distances, + int numberOfPoints + ) +``` + +For a given set of coordinates, the closest four corner coordinates plus more close points if n > 4 +are returned. If n < 4 the method will return the closest n corner coordinates. If the set of +coordinates contains a coordinate that matches the given coordinate, only this one coordinate is +returned. 
If n > number of coordinates in the set, all coordinates are returned. \ No newline at end of file diff --git a/docs/readthedocs/models/input/additionaldata/timeseries.md b/docs/readthedocs/models/input/additionaldata/timeseries.md new file mode 100644 index 000000000..9590a8404 --- /dev/null +++ b/docs/readthedocs/models/input/additionaldata/timeseries.md @@ -0,0 +1,30 @@ +# Time Series +Time series are meant to represent a timely ordered series of values. +Those can either be electrical or non-electrical depending on what one may need for power system simulations. +Our time series models are divided into two subtypes: + +## Individual Time Series +Each time instance in this time series has its own value (random duplicates may occur obviously). +They are only applicable for the time frame that is defined by the content of the time series. + +## Repetitive Time Series +Those time series do have repetitive values, e.g. each day or at any other period. +Therefore, they can be applied to any time frame, as the mapping from time instant to value is made by information +reduction. +In addition to actual data, a mapping function has to be known. + +## Available Classes +To be as flexible, as possible, the actual content of the time series is given as children of the `Value` class. +The following different values are available: + +| Value Class | Purpose | +|:-----------------------|:--------------------------------------------------------------------------------------------------------------| +| `PValue` | Electrical active power | +| `SValue` | Electrical active and reactive power | +| `HeatAndPValue` | Combination of thermal power (e.g. in kW)
and electrical active power (e.g. in kW) | +| `HeatAndSValue` | Combination of thermal power (e.g. in kW)
and electrical active and reactive power (e.g. in kW and kVAr) | +| `EnergyPriceValue` | Wholesale market price (e.g. in € / MWh) | +| `SolarIrradianceValue` | Combination of diffuse and direct solar irradiance | +| `TemperatureValue` | Temperature information | +| `WindValue` | Combination of wind direction and wind velocity | +| `WeatherValue` | Combination of irradiance, temperature and wind information | diff --git a/docs/readthedocs/models/input/grid/gridcontainer.md b/docs/readthedocs/models/input/grid/gridcontainer.md new file mode 100644 index 000000000..e40d20c5c --- /dev/null +++ b/docs/readthedocs/models/input/grid/gridcontainer.md @@ -0,0 +1,92 @@ +# Grid Container + +The grid container groups all entities that are able to form a full grid model. +Two types of grid containers are available: + +**JointGridContainer**
+This one is able to hold a grid model spanning several voltage levels. +On instantiation, a sub grid topology graph is built. +This graph holds `SubGridContainers` as vertices and transformer models as edges. +Thereby, you are able to discover the topology of galvanically separated sub grids and access those sub models +directly. + +and + +**SubGridContainer**
+This one is meant to hold all models, that form a galvanically separated sub grid. +In contrast to the `JointGridContainer` it only covers one voltage level and therefore has an additional field +for the predominant voltage level apparent in the container. +Why predominant? +As of convention, the `SubGridContainers` hold also reference to the transformers leading to higher sub grids +and their higher voltage coupling point. + +![Sub grid boundary definition for transformers with upstream switchgear](../../../_static/figures/transformerWithSwitchGear.png) + +Let's shed a more detailed light on the boundaries of a sub grid as of our definition. +This especially is important, if the switchgear of the transformer is modeled in detail. +We defined, that all nodes in upstream direction of the transformer, that are connected by switches *only* (therefore +are within the switchgear) are counted towards the inferior sub grid structure (here "2"), although they belong to a +different voltage level. +This decision is taken, because we assume, that the interest to operate on the given switchgear will most likely be +placed in the inferior grid structure. + +The "real" coupling node A is not comprised in the sub grids node collection, but obviously has reference through the +switch between nodes A and B. 
+ +A synoptic overview of both classes' attributes is given here: + +## Attributes, Units and Remarks + +| Attribute | Unit | Remarks | +|:------------------------|:-----|:--------------------------------------------------| +| gridName | -- | Human readable identifier | +| rawGrid | -- | see below | +| systemParticipants | -- | see below | +| graphics | -- | see below | +| subGridTopologyGraph | -- | topology of sub grids - only `JointGridContainer` | +| predominantVoltageLevel | -- | main voltage level - only `SubGridContainer` | +| subnet | -- | sub grid number - only `SubGridContainer` | + + +### RawGridElements +This sub container simply holds: + +* [nodes](/models/input/grid/node) +* [lines](/models/input/grid/line) +* [switches](/models/input/grid/switch) +* [two winding transformers](/models/input/grid/transformer2w) +* [three winding transformers](/models/input/grid/transformer3w) +* [measurement units](/models/input/grid/measurementunit) + + +### SystemParticipants +This sub container simply holds: + +* [biomass plant](/models/input/participant/bm) +* [combined heat and power plant](/models/input/participant/chp) +- [electric vehicles](/models/input/participant/ev) +- [electric vehicle charging stations](/models/input/participant/evcs) +- [fixed feed in facilities](/models/input/participant/fixedfeedin) +- [heat pumps](/models/input/participant/hp) +- [loads](/models/input/participant/load) +- [photovoltaic power plants](/models/input/participant/pv) +- [electrical energy storages](/models/input/participant/storage) +- [wind energy converters](/models/input/participant/wec) + +and the needed nested thermal models. + + +### Graphics +This sub container simply holds: + +* [schematic node graphics](/models/input/grid/nodegraphic) +* [schematic line graphics](/models/input/grid/linegraphic) + + +## Container Concept + +![Model container concept](../../../_static/figures/uml/ModelContainerConcept.png) + +## Caveats +Nothing - at least not known. 
+If you found something, please contact us! \ No newline at end of file diff --git a/docs/readthedocs/models/input/grid/gridcontainer.rst b/docs/readthedocs/models/input/grid/gridcontainer.rst deleted file mode 100644 index 85503c08c..000000000 --- a/docs/readthedocs/models/input/grid/gridcontainer.rst +++ /dev/null @@ -1,113 +0,0 @@ -.. _grid_container_model: - -Grid Container --------------- -The grid container groups all entities that are able to form a full grid model. -Two types of grid containers are available: - -JointGridContainer - This one is able to hold a grid model spanning several voltage levels. - On instantiation, a sub grid topology graph is built. - This graph holds :code:`SubGridContainers` as vertices and transformer models as edges. - Thereby, you are able to discover the topology of galvanically separated sub grids and access those sub models - directly. - -and - -SubGridContainer - This one is meant to hold all models, that form a galvanically separated sub grid. - In contrast to the :code:`JointGridContainer` it only covers one voltage level and therefore has an additional field - for the predominant voltage level apparent in the container. - Why predominant? - As of convention, the :code:`SubGridContainers` hold also reference to the transformers leading to higher sub grids - and their higher voltage coupling point. - - .. figure:: ../../../_static/figures/transformerWithSwitchGear.png - :align: center - :alt: Sub grid boundary definition for transformers with upstream switchgear - - Let's shed a more detailed light on the boundaries of a sub grid as of our definition. - This especially is important, if the switchgear of the transformer is modeled in detail. - We defined, that all nodes in upstream direction of the transformer, that are connected by switches *only* (therefore - are within the switchgear) are counted towards the inferior sub grid structure (here "2"), although they belong to a - different voltage level. 
- This decision is taken, because we assume, that the interest to operate on the given switchgear will most likely be - placed in the inferior grid structure. - - The "real" coupling node A is not comprised in the sub grids node collection, but obviously has reference through the - switch between nodes A and B. - -A synoptic overview of both classes' attributes is given here: - -Attributes, Units and Remarks -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -+-------------------------+------+---------------------------------------------------------+ -| Attribute | Unit | Remarks | -+=========================+======+=========================================================+ -| gridName | -- | Human readable identifier | -+-------------------------+------+---------------------------------------------------------+ -| rawGrid | -- | see below | -+-------------------------+------+---------------------------------------------------------+ -| systemParticipants | -- | see below | -+-------------------------+------+---------------------------------------------------------+ -| graphics | -- | see below | -+-------------------------+------+---------------------------------------------------------+ -| subGridTopologyGraph | -- | topology of sub grids - only :code:`JointGridContainer` | -+-------------------------+------+---------------------------------------------------------+ -| predominantVoltageLevel | -- | main voltage level - only :code:`SubGridContainer` | -+-------------------------+------+---------------------------------------------------------+ -| subnet | -- | sub grid number - only :code:`SubGridContainer` | -+-------------------------+------+---------------------------------------------------------+ - -.. _grid_container_raw_grid_elements: - -RawGridElements -""""""""""""""" -This sub container simply holds: - - * :ref:`nodes` - * :ref:`lines` - * :ref:`switches` - * :ref:`two winding transformers` - * :ref:`three winding transformers` - * :ref:`measurement units` - -.. 
_grid_container_system_participants: - -SystemParticipants -"""""""""""""""""" -This sub container simply holds: - - * :ref:`biomass plants` - * :ref:`combined heat and power plants` - * :ref:`electric vehicles` - * :ref:`electric vehicle charging stations` - * :ref:`fixed feed in facilities` - * :ref:`heat pumps` - * :ref:`loads` - * :ref:`photovoltaic power plants` - * :ref:`electrical energy storages` - * :ref:`wind energy converters` - -and the needed nested thermal models. - -.. _grid_container_graphics: - -Graphics -"""""""" -This sub container simply holds: - - * :ref:`schematic node graphics` - * :ref:`schematic line graphics` - -Container Concept -""""""""""""""""" - .. figure:: ../../../_static/figures/uml/ModelContainerConcept.png - :align: center - :width: 650 - :alt: Model container concept - -Caveats -^^^^^^^ -Nothing - at least not known. -If you found something, please contact us! \ No newline at end of file diff --git a/docs/readthedocs/models/input/grid/line.md b/docs/readthedocs/models/input/grid/line.md new file mode 100644 index 000000000..7c8facdf7 --- /dev/null +++ b/docs/readthedocs/models/input/grid/line.md @@ -0,0 +1,102 @@ +# Line + +Representation of an AC line. 
+ +## Attributes, Units and Remarks + +### Type Model + +| Attribute | Unit | Remarks | +|:----------|:--------|:--------------------------------------------| +| uuid | -- | | +| id | -- | Human readable identifier | +| r | Ω / km | Phase resistance per length | +| x | Ω / km | Phase reactance per length | +| g | µS / km | Phase-to-ground conductance per length | +| b | µS / km | Phase-to-ground susceptance per length | +| iMax | A | Maximum permissible current | +| vRated | kV | Rated voltage | + + +A list with some standard line types can be found here: `Standard Line Types`_ + + +### Entity Model + +| Attribute | Unit | Remarks | +|:------------------|:-----|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| uuid | -- | | +| id | -- | Human readable identifier | +| operator | -- | | +| operationTime | -- | Timely restriction of operation | +| nodeA | -- | | +| nodeB | -- | | +| parallelDevices | -- | overall amount of parallel lines to automatically
construct (e.g. parallelDevices = 2 will build a
total of two lines using the specified parameters) | +| type | -- | | +| length | km | | +| geoPosition | -- | Line string of geographical locations describing the
position of the line | +| olmCharacteristic | -- | Characteristic of possible overhead line monitoring
Can be given in the form of `olm:{}`.
The pairs are wind velocity in x and permissible
loading in y. | + + + +## Standard Line Types + +Following there are some standard line types with their source. A ``csv file`` containing the types listed below can be found +`here `_. This file can be used directly +for any simulation with ``simona``. +The lines which source is ``simBench`` are from `here `_. + + +### Overhead Lines + +Some standard overhead lines. + +| uuid | b | g | iMax | id | r | vRated | x | source | +|:--------------------------------------|--------:|----:|--------:|--------------------:|---------:|-------:|---------:|:---------| +| 91617ab8-3de2-4fba-be45-a54473ba09a9 | 3.61283 | 0.0 | 1300.0 | LineType_1 | 0.08 | 380.0 | 0.32 | simBench | +| b3b231ae-a971-4432-80d7-4ce2f2a56a32 | 3.22799 | 0.0 | 1950.0 | LineType_4 | 0.033333 | 380.0 | 0.333333 | simBench | +| 24595f91-8295-41f8-a3d8-c9418d860d9c | 1.076 | 0.0 | 650.0 | LineType_6 | 0.1 | 380.0 | 1.0 | simBench | +| f0fc57ec-aa5a-4484-b870-be70a5428cbd | 6.45597 | 0.0 | 3900.0 | LineType_9 | 0.016667 | 380.0 | 0.166667 | simBench | +| ba70d8e7-b082-49bc-8c45-3c10e1236c3e | 8.60796 | 0.0 | 5200.0 | LineType_10 | 0.0125 | 380.0 | 0.125 | simBench | +| veee8eeed-62c9-4345-aa5a-3743fe32007d | 12.9119 | 0.0 | 7800.0 | LineType_11 | 0.008333 | 380.0 | 0.083333 | simBench | +| d2b16935-dcd7-44d2-8623-cec4c703ccdc | 17.2159 | 0.0 | 10400.0 | LineType_12 | 0.00625 | 380.0 | 0.0625 | simBench | +| a490c96e-6e90-485a-b0d7-adeb81fa09cd | 4.30398 | 0.0 | 2600.0 | LineType_2 | 0.025 | 220.0 | 0.25 | simBench | +| 5272bcbc-7d0e-4759-85fa-27943fd8d19c | 2.15199 | 0.0 | 1300.0 | LineType_3 | 0.05 | 220.0 | 0.5 | simBench | +| dd0bac07-de8d-4608-af36-b8ff2819f55a | 7.22566 | 0.0 | 2600.0 | LineType_5 | 0.04 | 220.0 | 0.16 | simBench | +| 64c1dcb5-57a5-4f35-b2bf-9ae4e6cc4943 | 1.80642 | 0.0 | 650.0 | LineType_7 | 0.16 | 220.0 | 0.64 | simBench | +| bdc83a85-c796-4bcb-8b79-8988dc2804f8 | 5.41925 | 0.0 | 1950.0 | LineType_8 | 0.053333 | 220.0 | 0.213333 | simBench | +| 3d75fb6b-f0be-4451-ab4c-7f00c0ebd619 | 
2.8274 | 0.0 | 680.0 | Al/St_265/35 | 0.1095 | 110.0 | 0.296 | simBench | +| f5dcaf44-7a9a-4b85-89ba-5c15c04c5766 | 3.45575 | 0.0 | 105.0 | 15-AL1/3-ST1A 20.0 | 1.8769 | 20.0 | 0.409 | simBench | +| 9cbf484b-7256-4e7a-9c35-3e1049909aa0 | 3.53429 | 0.0 | 140.0 | 24-AL1/4-ST1A 20.0 | 1.2012 | 20.0 | 0.394 | simBench | +| 5b542a50-b0c2-4497-ba90-b2b31aafaa0b | 2.87456 | 0.0 | 170.0 | 34-AL1/6-ST1A 20.0 | 0.8342 | 20.0 | 0.382 | simBench | +| d594cd67-4459-44bc-9594-db710372db71 | 2.98451 | 0.0 | 210.0 | 48-AL1/8-ST1A 20.0 | 0.5939 | 20.0 | 0.372 | simBench | +| 305e60ad-cfd2-4127-9d83-8d9b21942d93 | 3.04734 | 0.0 | 290.0 | 70-AL1/11-ST1A 20.0 | 0.4132 | 20.0 | 0.36 | simBench | + + +### Cables + +Some standard cables. + +| uuid | b | g | iMax | id | r | vRated | x | source | +|:-------------------------------------|--------:|----:|------:|-----------------------------:|-------:|-------:|----------:|:---------| +| cc59abd4-770b-45d2-98c8-919c91f1ca4b | 58.7478 | 0.0 | 652.0 | 1x630_RM/50 | 0.122 | 110.0 | 0.122522 | simBench | +| 82ea1b98-2b21-48bd-841a-8d17d8ac20c9 | 59.3761 | 0.0 | 158.0 | NA2XS2Y 1x50 RM/25 12/20 kV | 0.64 | 20.0 | 0.145 | simBench | +| 4adef9e6-5e40-416d-8bd2-b6768d156c54 | 59.6903 | 0.0 | 220.0 | NA2XS2Y 1x70 RM/25 12/20 kV | 0.443 | 20.0 | 0.132 | simBench | +| d5c03484-59c2-44d5-a2ee-63a5a0d623b4 | 67.8584 | 0.0 | 252.0 | NA2XS2Y 1x95 RM/25 12/20 kV | 0.313 | 20.0 | 0.132 | simBench | +| 9c13909d-1dd1-4e2d-980b-55345bdf0fd0 | 72.2566 | 0.0 | 283.0 | NA2XS2Y 1x120 RM/25 12/20 kV | 0.253 | 20.0 | 0.119 | simBench | +| 36243493-eb31-4e81-bd13-b54ef59c4cbe | 78.5398 | 0.0 | 319.0 | NA2XS2Y 1x150 RM/25 12/20 kV | 0.206 | 20.0 | 0.116 | simBench | +| 437689f8-366d-4b04-b42d-d7a754db074b | 85.7655 | 0.0 | 362.0 | NA2XS2Y 1x185 RM/25 12/20 kV | 0.161 | 20.0 | 0.117 | simBench | +| b459115d-d4eb-47d4-b7ec-625339ee0dcc | 95.5044 | 0.0 | 421.0 | NA2XS2Y 1x240 RM/25 12/20 kV | 0.122 | 20.0 | 0.112 | simBench | +| 9aed5818-c037-4033-8d15-806c62d70b8f | 
113.097 | 0.0 | 315.0 | NA2XS2Y 1x150 RM/25 6/10 kV | 0.206 | 10.0 | 0.11 | simBench | +| 60d37bc7-157a-4c32-b1b5-e74c10d70531 | 127.549 | 0.0 | 358.0 | NA2XS2Y 1x185 RM/25 6/10 kV | 0.161 | 10.0 | 0.11 | simBench | +| a3ced617-2ffd-4593-b8e9-bcad9a521aab | 143.257 | 0.0 | 416.0 | NA2XS2Y 1x240 RM/25 6/10 kV | 0.122 | 10.0 | 0.105 | simBench | +| f0484bb6-9d0d-4d13-bfbe-b83783b8352a | 150.796 | 0.0 | 471.0 | NA2XS2Y 1x300 RM/25 6/10 kV | 0.1 | 10.0 | 0.0974 | simBench | +| 6b223bc3-69e2-4eb8-a2c0-76be1cd2c998 | 169.646 | 0.0 | 535.0 | NA2XS2Y 1x400 RM/25 6/10 kV | 0.078 | 10.0 | 0.0942 | simBench | +| 65181464-230a-487b-978f-81e406e9eb22 | 260.752 | 0.0 | 270.0 | NAYY 4x150SE 0.6/1kV | 0.2067 | 0.4 | 0.0804248 | simBench | +| 1200d9eb-6d10-47f3-8543-abea43b128d3 | 273.319 | 0.0 | 357.0 | NAYY 4x240SE 0.6/1kV | 0.1267 | 0.4 | 0.0797965 | simBench | + + +## Caveats + +Nothing - at least not known. +If you found something, please contact us! diff --git a/docs/readthedocs/models/input/grid/line.rst b/docs/readthedocs/models/input/grid/line.rst deleted file mode 100644 index 7a1409c2b..000000000 --- a/docs/readthedocs/models/input/grid/line.rst +++ /dev/null @@ -1,71 +0,0 @@ -.. _line_model: - -Line ----- -Representation of an AC line. 
- -Attributes, Units and Remarks -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Type Model -"""""""""" - -+-----------+---------+---------------------------------------------+ -| Attribute | Unit | Remarks | -+===========+=========+=============================================+ -| uuid | -- | | -+-----------+---------+---------------------------------------------+ -| id | -- | Human readable identifier | -+-----------+---------+---------------------------------------------+ -| r | Ω / km | Phase resistance per length | -+-----------+---------+---------------------------------------------+ -| x | Ω / km | Phase reactance per length | -+-----------+---------+---------------------------------------------+ -| g | µS / km | Phase-to-ground conductance per length | -+-----------+---------+---------------------------------------------+ -| b | µS / km | Phase-to-ground susceptance per length | -+-----------+---------+---------------------------------------------+ -| iMax | A | Maximum permissible current | -+-----------+---------+---------------------------------------------+ -| vRated | kV | Rated voltage | -+-----------+---------+---------------------------------------------+ - -Entity Model -"""""""""""" - -+-------------------+------+--------------------------------------------------------+ -| Attribute | Unit | Remarks | -+===================+======+========================================================+ -| uuid | -- | | -+-------------------+------+--------------------------------------------------------+ -| id | -- | Human readable identifier | -+-------------------+------+--------------------------------------------------------+ -| operator | -- | | -+-------------------+------+--------------------------------------------------------+ -| operationTime | -- | Timely restriction of operation | -+-------------------+------+--------------------------------------------------------+ -| nodeA | -- | | 
-+-------------------+------+--------------------------------------------------------+ -| nodeB | -- | | -+-------------------+------+--------------------------------------------------------+ -| parallelDevices | -- | | overall amount of parallel lines to automatically | -| | | | construct (e.g. parallelDevices = 2 will build a | -| | | | total of two lines using the specified parameters) | -+-------------------+------+--------------------------------------------------------+ -| type | -- | | -+-------------------+------+--------------------------------------------------------+ -| length | km | | -+-------------------+------+--------------------------------------------------------+ -| geoPosition | -- | | Line string of geographical locations describing the | -| | | | position of the line | -+-------------------+------+--------------------------------------------------------+ -| olmCharacteristic | -- | | Characteristic of possible overhead line monitoring | -| | | | Can be given in the form of `olm:{}`. | -| | | | The pairs are wind velocity in x and permissible | -| | | | loading in y. | -+-------------------+------+--------------------------------------------------------+ - -Caveats -^^^^^^^ -Nothing - at least not known. -If you found something, please contact us! diff --git a/docs/readthedocs/models/input/grid/transformer2w.md b/docs/readthedocs/models/input/grid/transformer2w.md new file mode 100644 index 000000000..a213b09dc --- /dev/null +++ b/docs/readthedocs/models/input/grid/transformer2w.md @@ -0,0 +1,77 @@ +# Two Winding Transformer + +Model of a two winding transformer. +It is assumed, that node A is the node with higher voltage. + +## Attributes, Units and Remarks + +### Type Model + +All impedances and admittances are given with respect to the higher voltage side. +As obvious, the parameter can be used in T- as in 𝜋-equivalent circuit representations. 
+ +| Attribute | Unit | Remarks | +|:----------|:-----|:--------------------------------------------------------| +| id | | Human readable identifier | +| rSc | Ω | Short circuit resistance | +| xSc | Ω | Short circuit reactance | +| gM | nS | No load conductance | +| bM | nS | No load susceptance | +| sRated | kVA | Rated apparent power | +| vRatedA | kV | Rated voltage at higher voltage terminal | +| vRatedB | kV | Rated voltage at lower voltage terminal | +| dV | % | Voltage magnitude increase per tap position | +| dPhi | ° | Voltage angle increase per tap position | +| tapSide | | true, if tap changer is installed on lower voltage side | +| tapNeutr | | Neutral tap position | +| tapMin | | Minimum tap position | +| tapMax | | Maximum tap position | + +A list with some standard transformer types can be found here: `Standard Two Winding Transformer Types`_ + + +### Entity Model + +| Attribute | Unit | Remarks | +|:----------------|:-----|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| uuid | -- | | +| id | -- | Human readable identifier | +| operator | -- | | +| operationTime | -- | Timely restriction of operation | +| nodeA | -- | Higher voltage node | +| nodeB | -- | Lower voltage node | +| parallelDevices | -- | overall amount of parallel transformers to automatically
construct (e.g. parallelDevices = 2 will build a
total of two transformers using the specified parameters) | +| type | -- | | +| tapPos | -- | Current position of the tap changer | +| autoTap | -- | true, if there is a tap regulation apparent and active | + + +## Standard Two Winding Transformer Types + + +Following there are some standard two winding transformer types with their source. A ``csv file`` containing the types listed +below can be found `here `_. This +file can be used directly for any simulation with ``simona``. +The transformers whose source is ``simBench`` are from `here `_. + + +| uuid | bM | dPhi | dV | gM | id | rSc | sRated | tapMax | tapMin | tapNeutr | tapSide | vRatedA | vRatedB | xSc | source | +|:-------------------------------------|--------------------:|-----:|----:|-------------------:|------------------------------------:|--------------------:|---------:|-------:|-------:|---------:|--------:|--------:|--------:|-------------------:|:---------| +| 5a890aae-b9c9-4ebf-8a49-8850ae9df402 | 219.43184927638458 | 0.0 | 1.0 | 1731.3019390581715 | Typ_x_380/220 | 0.6016666666666666 | 600000.0 | 16 | -16 | 0 | false | 380.0 | 220.0 | 44.51926783240413 | simBench | +| 03159c0d-126e-47cc-9871-066870df3a3f | 1193.4686938790917 | 0.0 | 1.0 | 831.0249307479223 | 350MVA_380/110 | 1.0608979591836734 | 350000.0 | 16 | -16 | 0 | false | 380.0 | 110.0 | 90.75951402093402 | simBench | +| 7cb289cb-e6af-4470-9c68-e5a91978a5e7 | 2013.800484464662 | 0.0 | 1.0 | 1446.280991735537 | 300MVA_220/110 | 0.20704444444444442 | 300000.0 | 16 | -16 | 0 | false | 220.0 | 110.0 | 19.358892855688435 | simBench | +| 73644bc6-78cf-4882-9837-e6508cab092d | 867.7685950413226 | 0.0 | 1.5 | 1157.0247933884295 | 25 MVA 110/20 kV YNd5 | 1.9843999999999997 | 25000.0 | 9 | -9 | 0 | false | 110.0 | 20.0 | 58.04608993412045 | simBench | +| 6935ae26-374a-4c24-aeee-6d5760d6ddf3 | 720.4791642215993 | 0.0 | 1.5 | 1487.603305785124 | 40 MVA 110/20 kV YNd5 | 1.0285 | 40000.0 | 9 | -9 | 0 | false | 110.0 | 20.0 | 48.994205909984906 | 
simBench | +| b49db20f-b8b5-4265-8318-f669b9d121e9 | 1015.6886939330394 | 0.0 | 1.5 | 1818.181818181818 | 63 MVA 110/10 kV YNd5 | 0.6146031746031745 | 63000.0 | 9 | -9 | 0 | false | 110.0 | 10.0 | 34.56596500037509 | simBench | +| 0843b836-cee4-4a8c-81a4-098400fe91cf | 24.495101551166183 | 0.0 | 2.5 | 2999.9999999999995 | 0.4 MVA 20/0.4 kV Dyn5 ASEA | 11.999999999999998 | 400.0 | 2 | -2 | 0 | false | 20.0 | 0.4 | 58.787753826796276 | simBench | +| a8f3aeea-ef4d-4f3c-bb07-09a0a86766c1 | 9.591746452043322 | 0.0 | 2.5 | 1149.9999999999998 | 0.16 MVA 20/0.4 kV DOTE 160/20 SGB | 36.71874999999999 | 160.0 | 2 | -2 | 0 | false | 20.0 | 0.4 | 93.01469452961452 | simBench | +| 0644c120-a247-425f-bbe4-31b153f7f440 | 16.583241729259253 | 0.0 | 2.5 | 2199.9999999999995 | 0.25 MVA 20/0.4 kV Dyn5 ASEA | 21.119999999999997 | 250.0 | 2 | -2 | 0 | false | 20.0 | 0.4 | 93.6479876986153 | simBench | +| bdf22ee4-deba-41f4-a187-ae00638a6880 | 36.47380569074435 | 0.0 | 2.5 | 4125.0 | 0.63 MVA 20/0.4 kV Dyn5 ASEA | 6.953892668178382 | 630.0 | 2 | -2 | 0 | false | 20.0 | 0.4 | 37.45518044666632 | simBench | +| a0cbd90a-4e9f-47db-8dca-041d3a288f77 | 145.8952227629774 | 0.0 | 2.5 | 16500.0 | 0.63 MVA 10/0.4 kV Dyn5 ASEA | 1.7384731670445954 | 630.0 | 2 | -2 | 0 | false | 10.0 | 0.4 | 9.36379511166658 | simBench | + + + +## Caveats + +Nothing - at least not known. +If you found something, please contact us! diff --git a/docs/readthedocs/models/input/grid/transformer2w.rst b/docs/readthedocs/models/input/grid/transformer2w.rst deleted file mode 100644 index 448e3fc54..000000000 --- a/docs/readthedocs/models/input/grid/transformer2w.rst +++ /dev/null @@ -1,82 +0,0 @@ -.. _transformer2w_model: - -Two Winding Transformer ------------------------ -Model of a two winding transformer. -It is assumed, that node A is the node with higher voltage. 
- -Attributes, Units and Remarks -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Type Model -"""""""""" -All impedances and admittances are given with respect to the higher voltage side. -As obvious, the parameter can be used in T- as in 𝜋-equivalent circuit representations. - -+-----------+------+---------------------------------------------------------+ -| Attribute | Unit | Remarks | -+===========+======+=========================================================+ -| uuid | | | -+-----------+------+---------------------------------------------------------+ -| id | | Human readable identifier | -+-----------+------+---------------------------------------------------------+ -| rSc | Ω | Short circuit resistance | -+-----------+------+---------------------------------------------------------+ -| xSc | Ω | Short circuit reactance | -+-----------+------+---------------------------------------------------------+ -| gM | nS | No load conductance | -+-----------+------+---------------------------------------------------------+ -| bM | nS | No load susceptance | -+-----------+------+---------------------------------------------------------+ -| sRated | kVA | Rated apparent power | -+-----------+------+---------------------------------------------------------+ -| vRatedA | kV | Rated voltage at higher voltage terminal | -+-----------+------+---------------------------------------------------------+ -| vRatedB | kV | Rated voltage at lower voltage terminal | -+-----------+------+---------------------------------------------------------+ -| dV | % | Voltage magnitude increase per tap position | -+-----------+------+---------------------------------------------------------+ -| dPhi | ° | Voltage angle increase per tap position | -+-----------+------+---------------------------------------------------------+ -| tapSide | | true, if tap changer is installed on lower voltage side | -+-----------+------+---------------------------------------------------------+ -| tapNeutr | | Neutral tap 
position | -+-----------+------+---------------------------------------------------------+ -| tapMin | | Minimum tap position | -+-----------+------+---------------------------------------------------------+ -| tapMax | | Maximum tap position | -+-----------+------+---------------------------------------------------------+ - -Entity Model -"""""""""""" - -+-----------------+------+------------------------------------------------------------+ -| Attribute | Unit | Remarks | -+=================+======+============================================================+ -| uuid | -- | | -+-----------------+------+------------------------------------------------------------+ -| id | -- | Human readable identifier | -+-----------------+------+------------------------------------------------------------+ -| operator | -- | | -+-----------------+------+------------------------------------------------------------+ -| operationTime | -- | Timely restriction of operation | -+-----------------+------+------------------------------------------------------------+ -| nodeA | -- | Higher voltage node | -+-----------------+------+------------------------------------------------------------+ -| nodeB | -- | Lower voltage node | -+-----------------+------+------------------------------------------------------------+ -| parallelDevices | -- | | overall amount of parallel transformers to automatically | -| | | | construct (e.g. 
parallelDevices = 2 will build a | -| | | | total of two transformers using the specified parameters)| -+-----------------+------+------------------------------------------------------------+ -| type | -- | | -+-----------------+------+------------------------------------------------------------+ -| tapPos | -- | Current position of the tap changer | -+-----------------+------+------------------------------------------------------------+ -| autoTap | -- | true, if there is a tap regulation apparent and active | -+-----------------+------+------------------------------------------------------------+ - -Caveats -^^^^^^^ -Nothing - at least not known. -If you found something, please contact us! diff --git a/docs/readthedocs/models/models.md b/docs/readthedocs/models/models.md new file mode 100644 index 000000000..c0e70ab78 --- /dev/null +++ b/docs/readthedocs/models/models.md @@ -0,0 +1,193 @@ +# Available models +This page gives an overview about all available models in *PowerSystemDataModel*. +They are basically grouped into two groups: + +1. [Input](#input) models may be used to describe input data for a power system simulation +2. [Result](#result) models denote results of such a simulation + +All those models are designed with some assumptions and goals in mind. +To assist you in applying them as intended, we will give you some general remarks: + +**Uniqueness**
+All models have a `uuid` field as a universally unique identifier. +There shouldn't be any two elements with the same `uuid` in your grid data set — or better, in your whole collection +of data sets. + +**Immutability**
+We designed the models in a way that does not allow for adaptations of the represented data after instantiation of the +objects. +Thereby you can be sure that your models are *thread-safe* and no unwanted or unobserved changes are made. + +**Copyable**
+With the general design principle of immutability, entity modifications (e.g. updates of field values) can become +hard and annoying. To avoid generating methods to update each field value, we provide an adapted version of the +[Builder pattern](https://en.wikipedia.org/wiki/Builder_pattern/) to make entity modifications as easy as possible. +Each entity holds its own copy builder class, which follows the same inheritance as the entity class itself. With a +call of `.copy()` on an entity instance a builder instance is returned that allows for modification of fields and +can be terminated with `.build()` which will return an instance of the entity with modified field values as required. +For the moment, this pattern is only implemented for a small number of `AssetInput` entities (all entities held by a +`GridContainer` except thermal units to be precise), but we plan to extend this capability to all input entities in the +future. + +**Single Point of Truth**
+Throughout all models you can be sure that no information is given twice, reducing the possibility to have ambiguous +information in your simulation setup. +"Missing" information can be received through the grid's relational information - e.g. if you intend to model a wind +energy converter in detail, you may find information on its geographical location in the model of its common +coupling point ([node](/models/input/grid/node)). + +**Harmonized Units System**
+As our models are representations of physical elements, we introduced a harmonized system of units. +The standard units the models are served with are given on each element's page. +Thereby you can be sure that all information is treated the same. +As most (database) sources do not support physical units, make sure you have your input data transferred to correct +units before. +The same applies for interpreting the obtained results. +In all models physical values are transferred to standard units on instantiation. + +**Equality Checks**
+To represent quantities in the models within an acceptable accuracy, the JSR 385 reference implementation +[Indriya](https://github.com/unitsofmeasurement/indriya) is used. Comparing quantity objects or objects holding quantity +instances is not as trivial as it might seem, because there might be different understandings about the equality of +quantities (e.g. there is a big difference between two instances being equal or equivalent). After long discussions how to +treat quantities in the entity `equals()` method, we agreed on the following rules to be applied: + +- equality check is done by calling `Objects.equals(, )` or + `.equals()`. + Using `Objects.equals(, )` is necessary especially for time series data. + As in contrast to all other places, quantity time series from real world data sometimes are not complete and + hence contain missing values. To represent missing values this is the only place where the usage of `null` + is a valid choice and hence needs to be treated accordingly. Please remember that this is only allowed in very few + places and you should try to avoid using `null` for quantities or any other constructor parameter whenever possible! +- equality is given if, and only if, the quantities value object and unit are exactly equal. Value objects can become + e.g. `BigDecimal` or `Double` instances. It is important, that the object type is also the same, otherwise + the entities `equals()` method returns false. This behavior is in sync with the equals implementation + of the indriya library. Hence, you should ensure that your code always pass in the same kind of a quantity instance + with the same underlying number format and type. For this purpose you should especially be aware of the unit conversion + method `AbstractQuantity.to(Quantity)` which may return seemingly unexpected types, e.g. if called on a quantity + with a `double` typed value, it may return a quantity with a value of either `Double` type or `BigDecimal` type. 
+- for now, there is no default way to compare entities in a 'number equality' way provided. E.g. a line with a length + of 1km compared to a line with a length of 1000m is actually of the same length, but calling `LineA.equals(LineB)` + would return `false` as the equality check does NOT convert units. If you want to compare two entity instances + based on their equivalence you have (for now) check for each quantity manually using their `isEquivalentTo()` + method. If you think you would benefit from a standard method that allows entity equivalence check, please consider + handing in an issue [Issues](https://github.com/ie3-institute/PowerSystemDataModel/issues). + Furthermore, the current existing implementation of `isEquivalentTo()` in indriya does not allow the provision of + a tolerance threshold that might be necessary when comparing values from floating point operations. We consider + providing such a method in our [PowerSystemUtils](https://github.com/ie3-institute/PowerSystemUtils) library. + If you think you would benefit from such a method, please consider handing in an issue + [Issues](https://github.com/ie3-institute/PowerSystemUtils/issues). + +**Conditional Parameters**
+Some of the models have conditional parameters. When reading model data from a data source, their respective factories for building these +models can handle nulls and empty Strings (as well as any combination of those) safely. E.g.: When given parameters for a line's +`operationTime` where `operationStartTime` and `operationEndTime` are both `null` or `""`, the +factory will build an always-on line model. + +**Validation**
+Information regarding validation of models can be found [here](/io/ValidationUtils). + + +## Input +Model classes you can use to describe a data set as input to power system simulations. + +```{toctree} +--- +maxdepth: 1 +--- +input/operator +``` + +### Grid Related Models + +```{toctree} +--- +maxdepth: 1 +--- +input/grid/node +input/grid/nodegraphic +input/grid/line +input/grid/linegraphic +input/grid/switch +input/grid/transformer2w +input/grid/transformer3w +input/grid/measurementunit +input/grid/gridcontainer +``` + +### Participant Related Models + +```{toctree} +--- +maxdepth: 1 +--- +input/participant/general +input/participant/bm +input/participant/chp +input/participant/ev +input/participant/evcs +input/participant/fixedfeedin +input/participant/hp +input/participant/load +input/participant/pv +input/participant/storage +input/participant/wec +input/participant/thermalbus +input/participant/thermalhouse +input/participant/cylindricalstorage +``` + +### Additional Data +Some models can use additional data for their calculations. + +```{toctree} +--- +maxdepth: 1 +--- +input/additionaldata/timeseries +input/additionaldata/idcoordinatesource +``` + +## Result +Model classes you can use to describe the outcome of a power system simulation. 
+ +### Grid Related Models + +```{toctree} +--- +maxdepth: 1 +--- +result/grid/node +result/grid/connector +result/grid/line +result/grid/switch +result/grid/transformer +result/grid/transformer2w +result/grid/transformer3w +``` + +### Participant Related Models + +```{toctree} +--- +maxdepth: 1 +--- +result/participant/bm +result/participant/chp +result/participant/ev +result/participant/evcs +result/participant/fixedfeedin +result/participant/hp +result/participant/load +result/participant/pv +result/participant/storage +result/participant/wec +result/participant/thermalsink +result/participant/thermalstorage +result/participant/thermalunit +result/participant/thermalhouse +result/participant/cylindricalstorage +result/participant/systemparticipant +result/participant/flexoption +result/participant/em +``` diff --git a/docs/readthedocs/models/models.rst b/docs/readthedocs/models/models.rst deleted file mode 100644 index 9b738fb72..000000000 --- a/docs/readthedocs/models/models.rst +++ /dev/null @@ -1,224 +0,0 @@ -################ -Available models -################ -This page gives an overview about all available models in *PowerSystemDataModel*. -They are basically grouped into three groups: - - 1. `Input`_ models may be used to describe input data for a power system simulation - 2. `Result`_ models denote results of such a simulation - 3. `Time Series`_ may serve both as input or output - -All those models are designed with some assumptions and goals in mind. -To assist you in applying them as intended, we will give you some general remarks: - -Uniqueness - All models have a :code:`uuid` field as universal unique identifier. - There shouldn't be any two elements with the same :code:`uuid` in your grid data set, better in your whole collection - of data sets. - -Immutability - We designed the models in a way, that does not allow for adaptions of the represented data after instantiation of the - objects. 
- Thereby you can be sure, that your models are *thread-safe* and no unwanted or unobserved changes are made. - -Copyable - With the general design principle of immutability, entity modifications (e.g. updates of field values) can become - hard and annoying. To avoid generating methods to update each field value, we provide an adapted version of the - `builder pattern `__ to make entity modifications as easy as possible. - Each entity holds it's own copy builder class, which follows the same inheritance as the entity class itself. With a - call of `.copy()` on an entity instance a builder instance is returned, that allows for modification of fields and - can be terminated with `.build()` which will return an instance of the entity with modified field values as required. - For the moment, this pattern is only implemented for a small amount of `AssetInput` entities (all entities held by a - `GridContainer` except thermal units to be precise), but we plan to extend this capability to all input entities in the - future. - -Single Point of Truth - Throughout all models you can be sure, that no information is given twice, reducing the possibility to have ambiguous - information in your simulation set up. - "Missing" information can be received through the grids relational information - e.g. if you intend to model a wind - energy converter in detail, you may find information of it's geographical location in the model of it's common - coupling point (:ref:`node`). - -Harmonized Units System - As our models are representations of physical elements, we introduced a harmonized system of units. - The standard units, the models are served with, is given on each element's page. - Thereby you can be sure, that all information are treated the same. - As most (database) sources do not support physical units, make sure, you have your input data transferred to correct - units before. - Same applies for interpreting the obtained results. 
- In all models physical values are transferred to standard units on instantiation. - -Equality Checks - To represent quantities in the models within an acceptable accuracy, the JSR 385 reference implementation - `Indriya `__ is used. Comparing quantity objects or objects holding quantity - instances is not as trivial as it might seem, because there might be different understandings about the equality of - quantities (e.g. there is a big difference between two instances being equal or equivalent). After long discussions how to - treat quantities in the entity :code:`equals()` method, we agreed on the following rules to be applied: - - - equality check is done by calling :code:`Objects.equals(, )` or - :code:`.equals()`. - Using :code:`Objects.equals(, )` is necessary especially for time series data. - As in contrast to all other places, quantity time series from real world data sometimes are not complete and - hence contain missing values. To represent missing values this is the only place where the usage of :code:`null` - is a valid choice and hence needs to be treated accordingly. Please remember that this is only allowed in very few - places and you should try to avoid using :code:`null` for quantities or any other constructor parameter whenever possible! - - equality is given if, and only if, the quantities value object and unit are exactly equal. Value objects can become - e.g. :code:`BigDecimal` or :code:`Double` instances. It is important, that the object type is also the same, otherwise - the entities :code:`equals()` method returns false. This behavior is in sync with the equals implementation - of the indriya library. Hence, you should ensure that your code always pass in the same kind of a quantity instance - with the same underlying number format and type. For this purpose you should especially be aware of the unit conversion - method :code:`AbstractQuantity.to(Quantity)` which may return seemingly unexpected types, e.g. 
if called on a quantity - with a :code:`double` typed value, it may return a quantity with a value of either :code:`Double` type or :code:`BigDecimal` type. - - for now, there is no default way to compare entities in a 'number equality' way provided. E.g. a line with a length - of 1km compared to a line with a length of 1000m is actually of the same length, but calling :code:`LineA.equals(LineB)` - would return :code:`false` as the equality check does NOT convert units. If you want to compare two entity instances - based on their equivalence you have (for now) check for each quantity manually using their :code:`isEquivalentTo()` - method. If you think you would benefit from a standard method that allows entity equivalence check, please consider - handing in an issue `here `__. - Furthermore, the current existing implementation of :code:`isEquivalentTo()` in indriya does not allow the provision of - a tolerance threshold that might be necessary when comparing values from floating point operations. We consider - providing such a method in our `PowerSystemUtils `__ library. - If you think you would benefit from such a method, please consider handing in an issue - `here `__. - -Conditional Parameters - Some of the models have conditional parameters. When reading model data from a data source, their respective factories for building these - models can handle nulls and empty Strings (as well as any combination of those) safely. E.g.: When given parameters for a line's - :code:`operationTime` where :code:`operationStartTime` and :code:`operationEndTime` are both :code:`null` or :code:`""`, the - factory will build an always-on line model. - -***** -Input -***** -Model classes you can use to describe a data set as input to power system simulations. - -.. toctree:: - :maxdepth: 1 - - input/operator - -Grid Related Input Models -========================= -.. 
toctree:: - :maxdepth: 1 - - input/grid/node - input/grid/nodegraphic - input/grid/line - input/grid/linegraphic - input/grid/switch - input/grid/transformer2w - input/grid/transformer3w - input/grid/measurementunit - input/grid/gridcontainer - -Participant Related Input Models -================================ -.. toctree:: - :maxdepth: 1 - - input/participant/general - input/participant/bm - input/participant/chp - input/participant/ev - input/participant/evcs - input/participant/fixedfeedin - input/participant/hp - input/participant/load - input/participant/pv - input/participant/storage - input/participant/wec - input/participant/thermalbus - input/participant/thermalhouse - input/participant/cylindricalstorage - -****** -Result -****** -Model classes you can use to describe the outcome of a power system simulation. - -Grid Related Result Models -========================== -.. toctree:: - :maxdepth: 1 - - result/grid/node - result/grid/connector - result/grid/line - result/grid/switch - result/grid/transformer - result/grid/transformer2w - result/grid/transformer3w - -Participant Related Result Models -================================= -.. toctree:: - :maxdepth: 1 - - result/participant/bm - result/participant/chp - result/participant/ev - result/participant/evcs - result/participant/fixedfeedin - result/participant/hp - result/participant/load - result/participant/pv - result/participant/storage - result/participant/wec - result/participant/thermalsink - result/participant/thermalstorage - result/participant/thermalunit - result/participant/thermalhouse - result/participant/cylindricalstorage - result/participant/systemparticipant - -*********** -Time Series -*********** -Time series are meant to represent a timely ordered series of values. -Those can either be electrical or non-electrical depending on what one may need for power system simulations. -Our time series models are divided into two subtypes: - -.. 
_individual_time_series: - -Individual Time Series - Each time instance in this time series has its own value (random duplicates may occur obviously). - They are only applicable for the time frame that is defined by the content of the time series. - -.. _repetitive_time_series: - -Repetitive Time Series - Those time series do have repetitive values, e.g. each day or at any other period. - Therefore, they can be applied to any time frame, as the mapping from time instant to value is made by information - reduction. - In addition to actual data, a mapping function has to be known. - -To be as flexible, as possible, the actual content of the time series is given as children of the :code:`Value` class. -The following different values are available: - -+-------------------------------+------------------------------------------------------------------+ -| Value Class | Purpose | -+===============================+==================================================================+ -| :code:`PValue` | Electrical active power | -+-------------------------------+------------------------------------------------------------------+ -| :code:`SValue` | Electrical active and reactive power | -+-------------------------------+------------------------------------------------------------------+ -| :code:`HeatAndPValue` | | Combination of thermal power (e.g. in kW) | -| | | and electrical active power (e.g. in kW) | -+-------------------------------+------------------------------------------------------------------+ -| :code:`HeatAndSValue` | | Combination of thermal power (e.g. in kW) | -| | | and electrical active and reactive power (e.g. in kW and kVAr) | -+-------------------------------+------------------------------------------------------------------+ -| :code:`EnergyPriceValue` | Wholesale market price (e.g. 
in € / MWh) | -+-------------------------------+------------------------------------------------------------------+ -| :code:`SolarIrradianceValue` | Combination of diffuse and direct solar irradiance | -+-------------------------------+------------------------------------------------------------------+ -| :code:`TemperatureValue` | Temperature information | -+-------------------------------+------------------------------------------------------------------+ -| :code:`WindValue` | Combination of wind direction and wind velocity | -+-------------------------------+------------------------------------------------------------------+ -| :code:`WeatherValue` | Combination of irradiance, temperature and wind information | -+-------------------------------+------------------------------------------------------------------+ - -.. include:: ValidationUtils.rst - diff --git a/docs/readthedocs/models/result/grid/switch.rst b/docs/readthedocs/models/result/grid/switch.rst index bde8c39eb..aa11cda90 100644 --- a/docs/readthedocs/models/result/grid/switch.rst +++ b/docs/readthedocs/models/result/grid/switch.rst @@ -16,14 +16,6 @@ Attributes, Units and Remarks +---------------+----------------+----------------------------------------------------------+ | inputModel | -- | uuid for the associated input model | +---------------+----------------+----------------------------------------------------------+ -| iAMag | ampere | A stands for sending node | -+---------------+----------------+----------------------------------------------------------+ -| iAAng | degree | | -+---------------+----------------+----------------------------------------------------------+ -| iBMag | ampere | B stands for receiving node | -+---------------+----------------+----------------------------------------------------------+ -| iBAng | degree | | -+---------------+----------------+----------------------------------------------------------+ | closed | boolean | status of the switching device | 
+---------------+----------------+----------------------------------------------------------+ diff --git a/docs/readthedocs/models/result/participant/em.rst b/docs/readthedocs/models/result/participant/em.rst new file mode 100644 index 000000000..90f47b1cb --- /dev/null +++ b/docs/readthedocs/models/result/participant/em.rst @@ -0,0 +1,29 @@ +.. _em_result: + +Energy Management +----------------- +Result of an energy management entity. + +Attributes, Units and Remarks +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ++---------------+---------+----------------------------------------------------------------------------+ +| Attribute | Unit | Remarks | ++===============+=========+============================================================================+ +| uuid | -- | uuid for the result entity | ++---------------+---------+----------------------------------------------------------------------------+ +| time | -- | date and time for the produced result | ++---------------+---------+----------------------------------------------------------------------------+ +| inputModel | -- | uuid for the associated input model | ++---------------+---------+----------------------------------------------------------------------------+ +| p | MW | active power output | ++---------------+---------+----------------------------------------------------------------------------+ +| q | MW | reactive power output | ++---------------+---------+----------------------------------------------------------------------------+ + + +Caveats +^^^^^^^ +Nothing - at least not known. +If you found something, please contact us! + diff --git a/docs/readthedocs/models/result/participant/flexoption.rst b/docs/readthedocs/models/result/participant/flexoption.rst new file mode 100644 index 000000000..e2abe9547 --- /dev/null +++ b/docs/readthedocs/models/result/participant/flexoption.rst @@ -0,0 +1,30 @@ +.. _flexoption_result: + +Flexibility Option +------------------ +Result of a flexibility option. 
+ +Attributes, Units and Remarks +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ++---------------+---------+----------------------------------------------------------------------------+ +| Attribute | Unit | Remarks | ++===============+=========+============================================================================+ +| uuid | -- | uuid for the result entity | ++---------------+---------+----------------------------------------------------------------------------+ +| time | -- | date and time for the produced result | ++---------------+---------+----------------------------------------------------------------------------+ +| inputModel | -- | uuid for the associated input model | ++---------------+---------+----------------------------------------------------------------------------+ +| pRef | MW | active power that was suggested for regular usage by the system participant| ++---------------+---------+----------------------------------------------------------------------------+ +| pMin | MW | active minimal power that was determined by the system participant | ++---------------+---------+----------------------------------------------------------------------------+ +| pMax | MW | active maximum power that was determined by the system participant | ++---------------+---------+----------------------------------------------------------------------------+ + +Caveats +^^^^^^^ +Nothing - at least not known. +If you found something, please contact us! + diff --git a/docs/readthedocs/models/result/participant/hp.rst b/docs/readthedocs/models/result/participant/hp.rst index e5fe4f2e1..5c51b59c4 100644 --- a/docs/readthedocs/models/result/participant/hp.rst +++ b/docs/readthedocs/models/result/participant/hp.rst @@ -1,7 +1,7 @@ .. _hp_result: -Load ----- +Heat Pump +--------- Result of a heat pump. 
Attributes, Units and Remarks diff --git a/docs/readthedocs/requirements.txt b/docs/readthedocs/requirements.txt index 605bbfa69..36dad5c08 100644 --- a/docs/readthedocs/requirements.txt +++ b/docs/readthedocs/requirements.txt @@ -1,4 +1,6 @@ commonmark==0.9.1 recommonmark==0.7.1 -Sphinx==5.3.0 -sphinx-rtd-theme==1.2.0 +Sphinx==6.2.1 +sphinx-rtd-theme==1.2.2 +myst-parser==2.0.0 +markdown-it-py==3.0.0 diff --git a/docs/uml/main/DataSourceClassDiagram.puml b/docs/uml/main/DataSourceClassDiagram.puml index 77a8e2976..5f05c566f 100644 --- a/docs/uml/main/DataSourceClassDiagram.puml +++ b/docs/uml/main/DataSourceClassDiagram.puml @@ -2,197 +2,166 @@ note "Assuming all classes to implement \nthe abstract methods of their interfaces\n\n" as generalNotes -interface DataSource - -interface TypeSource { - {abstract} Set getTransformer2WTypes() - {abstract} Set getTransformer3WTypes() - {abstract} Set getOperators() - {abstract} Set getLineTypes() - {abstract} Set getBmTypes() - {abstract} Set getChpTypes() - {abstract} Set getHpTypes() - {abstract} Set getStorageTypes() - {abstract} Set getWecTypes() - {abstract} Set getEvTypes() -} -DataSource <|-- TypeSource - -interface ThermalSource { - {abstract} Set getThermalBuses() - {abstract} Set getThermalBuses(Set) - {abstract} Set getThermalStorages() - {abstract} Set getThermalStorages(Set, Set) - {abstract} Set getThermalHouses() - {abstract} Set getThermalHouses(Set, Set) - {abstract} Set getCylindricStorages() - {abstract} Set getCylindricStorages(Set, Set) -} -DataSource <|-- ThermalSource - -interface RawGridSource { - {abstract} Optional getGridData() - {abstract} Set getNodes() - {abstract} Set getNodes(Set) - {abstract} Set getLines() - {abstract} Set getLines(Set, Set, Set) - {abstract} Set get2WTransformers() - {abstract} Set get2WTransformers(Set, Set, Set) - {abstract} Set get3WTransformers() - {abstract} Set get3WTransformers(Set, Set, Set) - {abstract} Set getSwitches() - {abstract} Set getSwitches(Set, Set) - 
{abstract} Set getMeasurementUnits() - {abstract} Set getMeasurementUnits(Set, Set) -} -DataSource <|-- RawGridSource - -interface SystemParticipantSource{ - {abstract} Optional getSystemParticipants() - {abstract} Set getBmPlants() - {abstract} Set getBmPlants(Set, Set, Set) - {abstract} Set getChpPlants() - {abstract} Set getChpPlants(Set, Set, Set, Set, Set) - {abstract} Set getEvs() - {abstract} Set getEvs(Set, Set, Set) - {abstract} Set getEvCS() - {abstract} Set getEvCS(Set, Set) - {abstract} Set getFixedFeedIns() - {abstract} Set getFixedFeedIns(Set, Set) - {abstract} Set getHeatPumps() - {abstract} Set getHeatPumps(Set, Set, Set, Set) - {abstract} Set getLoads() - {abstract} Set getLoads(Set, Set) - {abstract} Set getPvPlants() - {abstract} Set getPvPlants(Set, Set) - {abstract} Set getStorages() - {abstract} Set getStorages(Set, Set, Set) - {abstract} Set getWecPlants() - {abstract} Set getWecPlants(Set, Set, Set) -} -DataSource <|-- SystemParticipantSource - -interface GraphicSource { - {abstract} Optional getGraphicElements() - {abstract} Set getNodeGraphicInput() - {abstract} Set getNodeGraphicInput(Set) - {abstract} Set getLineGraphicInput() - {abstract} Set getLineGraphicInput(Set) -} -DataSource <|-- GraphicSource - -interface WeatherSource { - {abstract} Map> getWeather(ClosedInterval) - {abstract} Map> getWeather(ClosedInterval, Collection) - {abstract} WeatherValue getWeather(ZonedDateTime date, Point coordinate) -} -DataSource <|-- WeatherSource - -interface TimeSeriesMappingSource { - {abstract} Map getMapping() - Optional getTimeSeriesUuid(UUID) - {abstract} Optional getTimeSeriesMetaInformation(UUID) -} -DataSource <|-- TimeSeriesMappingSource +interface DataSource { + {abstract} Stream> getSourceData(Class entityClass) +} + +DataSource <|-- CsvDataSource +DataSource <|-- SqlDataSource + +' Implementations + +class CsvDataSource { + - String csvSep + # CsvFileConnector connector + + CsvDataSource(String, String, FileNamingStrategy) +} + +class 
SqlDataSource { + - String schemaName + # SqlConnector connector + # DatabaseNamingStrategy databaseNamingStrategy + + SqlDataSource(String, String, DatabaseNamingStrategy) +} + +abstract class WeatherSource { + - TimeBasedWeatherValueFactory weatherFactory + - Map> coordinateToTimeSeries + - IdCoordinateSource idCoordinateSource + - {abstract} Map> getWeather(ClosedInterval) + - {abstract} Map> getWeather(ClosedInterval, Collection) + - {abstract} Optional> getWeather(ZonedDateTime, Point) +} + +WeatherSource <|-- CsvWeatherSource +WeatherSource <|-- SqlWeatherSource +WeatherSource <|-- InfluxDbWeatherSource +WeatherSource <|-- CouchbaseWeatherSource + +class CsvWeatherSource { + - CsvDataSource dataSource + - CsvWeatherSource(String, String, FileNamingStrategy, IdCoordinateSource, TimeBasedWeatherValueFactory) +} +class SqlWeatherSource { + - SqlDataSource dataSource + - String factoryCoordinateFieldName + - SqlWeatherSource(SqlConnector, IdCoordinateSource, String, String, TimeBasedWeatherValueFactory) +} +class InfluxDbWeatherSource { + - InfluxDbConnector connector + - InfluxDbWeatherSource(InfluxDbConnector, IdCoordinateSource, TimeBasedWeatherValueFactory) + - IndividualTimeSeries getWeather(ClosedInterval, Point) +} +class CouchbaseWeatherSource { + - CouchbaseConnector connector, + - CouchbaseWeatherSource(CouchbaseConnector, IdCoordinateSource, String, TimeBasedWeatherValueFactory, String) + - CouchbaseWeatherSource(CouchbaseConnector, IdCoordinateSource, String, String, TimeBasedWeatherValueFactory, String) + - String generateWeatherKey(ZonedDateTime, Integer) + - String createQueryStringForIntervalAndCoordinate(ClosedInterval, int) +} + +abstract class TimeSeriesMappingSource { + - Map getMapping() + - Optional getTimeSeriesUuid(UUID) + {abstract} Stream> getMappingSourceData() +} + +TimeSeriesMappingSource <|-- CsvTimeSeriesMappingSource +TimeSeriesMappingSource <|-- SqlTimeSeriesMappingSource class CsvTimeSeriesMappingSource { - - 
TimeSeriesMappingFactory mappingFactory - - Map mapping + - CsvDataSource dataSource + - CsvTimeSeriesMappingSource(String, String, FileNamingStrategy) +} + +class SqlTimeSeriesMappingSource { + - EntityPersistenceNamingStrategy entityPersistenceNamingStrategy + - SqlDataSource dataSource + - SqlTimeSeriesMappingSource(SqlConnector, String, EntityPersistenceNamingStrategy) } -TimeSeriesMappingSource <|.. CsvTimeSeriesMappingSource -CsvDataSource <|-- CsvTimeSeriesMappingSource interface TimeSeriesSource { {abstract} IndividualTimeSeries getTimeSeries() {abstract} IndividualTimeSeries getTimeSeries(ClosedInterval)) {abstract} Optional getValue(ZonedDateTime) } -DataSource <|-- TimeSeriesSource + +TimeSeriesSource <|-- CsvTimeSeriesSource +TimeSeriesSource <|-- SqlTimeSeriesSource class CsvTimeSeriesSource { - IndividualTimeSeries timeSeries + - CsvDataSource dataSource + {static} CsvTimeSeriesSource getSource(\n\tString,\n\tString,\n\tEntityPersistenceNamingStrategy,\n\tCsvFileConnector.CsvIndividualTimeSeriesMetaInformation) - - IndividualTimeSeries buildIndividualTimeSeries(\n\tUUID,\n\tfilePath,\n\tFunction,\n\tOptional>>) - - Optional> buildTimeBasedValue(\n\tMap,\n\tClass,\n\tTimeBasedSimpleValueFactory) + - CsvTimeSeriesSource(\n\tString,\n\tString,\n\tFileNamingStrategy,\n\tUUID,\n\tString,\n\tClass,\n\tTimeBasedSimpleValueFactory) } -TimeSeriesSource <|.. 
CsvTimeSeriesSource -CsvDataSource <|-- CsvTimeSeriesSource -interface DataConnector { - {abstract} shutdown() +class SqlTimeSeriesSource { + - SqlDataSource dataSource + - UUID timeSeriesUuid + - Class valueClass + - TimeBasedSimpleValueFactory valueFactory + - SqlTimeSeriesSource(SqlDataSource, UUID, Class, TimeBasedSimpleValueFactory) + - SqlTimeSeriesSource(SqlConnector, String, DatabaseNamingStrategy, UUID, Class, TimeBasedSimpleValueFactory) + + {static} SqlTimeSeriesSource createSource(SqlConnector, String, DatabaseNamingStrategy, IndividualTimeSeriesMetaInformation, String) + + {static} SqlTimeSeriesSource create(SqlConnector, String, DatabaseNamingStrategy, UUID, Class, String) } -' Implementations +interface TimeSeriesMetaInformationSource { + - {abstract} Map getTimeSeriesMetaInformation() + - {abstract} Optional getTimeSeriesMetaInformation(UUID) +} -Abstract Class CsvDataSource { - - String csvSep - # CsvFileConnector connector - + CsvDataSource(String, String, EntityPersistenceNamingStrategy) -} -DataSource <|.. CsvDataSource - -Class CsvTypeSource { - - OperatorInputFactory operatorInputFactory - - Transformer2WTypeInputFactory transformer2WTypeInputFactory - - LineTypeInputFactory lineTypeInputFactory - - Transformer3WTypeInputFactory transformer3WTypeInputFactory - - SystemParticipantTypeInputFactory systemParticipantTypeInputFactory - + CsvTypeSource(String, String, EntityPersistenceNamingStrategy) -} -TypeSource <|.. CsvTypeSource -CsvDataSource <|-- CsvTypeSource - -Class CsvThermalSource { - - TypeSource typeSource - - ThermalBusInputFactory thermalBusInputFactory - - CylindricalStorageInputFactory cylindricalStorageInputFactory - - ThermalHouseInputFactory thermalHouseInputFactory - + CsvThermalSource(String, String, EntityPersistenceNamingStrategy, TypeSource) -} -ThermalSource <|.. 
CsvThermalSource -CsvDataSource <|-- CsvThermalSource - -Class CsvRawGridSource { - - TypeSource typeSource - - NodeInputFactory nodeInputFactory - - LineInputFactory lineInputFactory - - Transformer2WInputFactory transformer2WInputFactory - - Transformer3WInputFactory transformer3WInputFactory - - SwitchInputFactory switchInputFactory - - MeasurementUnitInputFactory measurementUnitInputFactory - + CsvRawGridSource(String, String, EntityPersistenceNamingStrategy, TypeSource) -} -RawGridSource <|.. CsvRawGridSource -CsvDataSource <|-- CsvRawGridSource - -Class CsvSystemParticipantSource { - - TypeSource typeSource - - RawGridSource rawGridSource - - ThermalSource thermalSource - - BmInputFactory bmInputFactory - - ChpInputFactory chpInputFactory - - EvInputFactory evInputFactory - - FixedFeedInInputFactory fixedFeedInInputFactory - - HpInputFactory hpInputFactory - - LoadInputFactory loadInputFactory - - PvInputFactory pvInputFactory - - StorageInputFactory storageInputFactory - - WecInputFactory wecInputFactory - - EvcsInputFactory evcsInputFactory - + CsvSystemParticipantSource(String, String, EntityPersistenceNamingStrategy, TypeSource, ThermalSource, RawGridSource) -} -SystemParticipantSource <|.. CsvSystemParticipantSource -CsvDataSource <|-- CsvSystemParticipantSource - -Class CsvGraphicSource { - - TypeSource typeSource - - RawGridSource rawGridSource - - LineGraphicInputFactory lineGraphicInputFactory - - NodeGraphicInputFactory nodeGraphicInputFactory - + CsvGraphicSource(String, String, EntityPersistenceNamingStrategy, TypeSource, RawGridSource) -} -GraphicSource <|.. CsvGraphicSource -CsvDataSource <|-- CsvGraphicSource +TimeSeriesMetaInformationSource <|.. CsvTimeSeriesMetaInformationSource +TimeSeriesMetaInformationSource <|.. 
SqlTimeSeriesMetaInformationSource + +class CsvTimeSeriesMetaInformationSource { + - CsvDataSource dataSource + - Map timeSeriesMetaInformation + - CsvTimeSeriesMetaInformationSource(String, String, FileNamingStrategy) +} + +class SqlTimeSeriesMetaInformationSource { + - DatabaseNamingStrategy namingStrategy + - Map mapping + - SqlDataSource dataSource + - SqlTimeSeriesMetaInformationSource(SqlConnector, String, DatabaseNamingStrategy) +} + + + + + + +interface IdCoordinateSource { + {abstract} Optional getCoordinate(int) + {abstract} Collection getCoordinates(int...) + {abstract} Optional getId(Point) + {abstract} Collection getAllCoordinates() + {abstract} List getNearestCoordinates(Point, int) + {abstract} List getClosestCoordinates(Point, int, ComparableQuantity) +} + +IdCoordinateSource <|.. CsvIdCoordinateSource +IdCoordinateSource <|.. SqlIdCoordinateSource + +class CsvIdCoordinateSource { + - Map idToCoordinate + - Map coordinateToId + - CsvDataSource dataSource + - IdCoordinateFactory factory + - CsvIdCoordinateSource(IdCoordinateFactory, CsvDataSource) +} + +class SqlIdCoordinateSource { + - SqlDataSource dataSource + - SqlIdCoordinateFactory factory + - SqlIdCoordinateSource(SqlIdCoordinateFactory, String, SqlDataSource) +} + +interface DataConnector { + {abstract} shutdown() +} Class CsvFileConnector { - Map, BufferedCsvWriter> entityWriters diff --git a/docs/uml/main/EntitySourceClassDiagram.puml b/docs/uml/main/EntitySourceClassDiagram.puml new file mode 100644 index 000000000..a160bc739 --- /dev/null +++ b/docs/uml/main/EntitySourceClassDiagram.puml @@ -0,0 +1,157 @@ +@startuml + +note "Assuming all classes to implement \nthe abstract methods of their interfaces\n\n" as generalNotes + +abstract class EntitySource { + - DataSource dataSource + - Set buildNodeAssetEntities(Class, EntityFactory, Collection, Collection, ConcurrentMap, LongAdder>) + - Set buildNodeAssetEntities(Class, EntityFactory, Collection, Collection) + - Set 
buildAssetInputEntities(Class, EntityFactory, Collection) + - Set buildAssetInputEntities(Class, EntityFactory) +} +EntitySource <|-- GraphicSource +EntitySource <|-- RawGridSource +EntitySource <|-- ResultEntitySource +EntitySource <|-- SystemParticipantSource +EntitySource <|-- ThermalSource +EntitySource <|-- TypeSource + +class GraphicSource { + - TypeSource typeSource + - RawGridSource rawGridSource + - LineGraphicInputFactory lineGraphicInputFactory + - NodeGraphicInputFactory nodeGraphicInputFactory + - GraphicSource(TypeSource, RawGridSource, DataSource) + - Optional getGraphicElements() + - Set getNodeGraphicInput() + - Set getNodeGraphicInput(Set) + - Set getLineGraphicInput() + - Set getLineGraphicInput(Set) +} + +class RawGridSource { + - TypeSource typeSource + - NodeInputFactory nodeInputFactory + - LineInputFactory lineInputFactory + - Transformer2WInputFactory transformer2WInputFactory + - Transformer3WInputFactory transformer3WInputFactory + - SwitchInputFactory switchInputFactory + - MeasurementUnitInputFactory measurementUnitInputFactory + - Optional getGridData() + - Set getNodes() + - Set getNodes(Set) + - Set getLines() + - Set getLines(Set, Set, Set) + - Set get2WTransformers() + - Set get2WTransformers(Set, Set, Set) + - Set get3WTransformers() + - Set get3WTransformers(Set, Set, Set) + - Set getSwitches() + - Set getSwitches(Set, Set) + - Set getMeasurementUnits() + - Set getMeasurementUnits(Set, Set) +} + +class ResultEntitySource { + - SystemParticipantResultFactory systemParticipantResultFactory + - ThermalResultFactory thermalResultFactory + - SwitchResultFactory switchResultFactory + - NodeResultFactory nodeResultFactory + - ConnectorResultFactory connectorResultFactory + - FlexOptionsResultFactory flexOptionsResultFactory + - ResultEntitySource(DataSource) + - ResultEntitySource(DataSource, String) + - Set getNodeResults() + - Set getSwitchResults() + - Set getLineResults() + - Set getTransformer2WResultResults() + - Set 
getTransformer3WResultResults() + - Set getFlexOptionsResults() + - Set getLoadResults() + - Set getPvResults() + - Set getFixedFeedInResults() + - Set getBmResults() + - Set getChpResults() + - Set getWecResults() + - Set getStorageResults() + - Set getEvcsResults() + - Set getEvResults() + - Set getHpResults() + - Set getCylindricalStorageResult() + - Set getThermalHouseResults() + - Set getEmResults() +} + +class SystemParticipantSource{ + - TypeSource typeSource + - RawGridSource rawGridSource + - ThermalSource thermalSource + - BmInputFactory bmInputFactory + - ChpInputFactory chpInputFactory + - EvInputFactory evInputFactory + - FixedFeedInInputFactory fixedFeedInInputFactory + - HpInputFactory hpInputFactory + - LoadInputFactory loadInputFactory + - PvInputFactory pvInputFactory + - StorageInputFactory storageInputFactory + - WecInputFactory wecInputFactory + - EvcsInputFactory evcsInputFactory + - SystemParticipantSource(TypeSource, ThermalSource, RawGridSource, DataSource) + - Optional getSystemParticipants() + - Set getBmPlants() + - Set getBmPlants(Set, Set, Set) + - Set getChpPlants() + - Set getChpPlants(Set, Set, Set, Set, Set) + - Set getEvs() + - Set getEvs(Set, Set, Set) + - Set getEvCS() + - Set getEvCS(Set, Set) + - Set getFixedFeedIns() + - Set getFixedFeedIns(Set, Set) + - Set getHeatPumps() + - Set getHeatPumps(Set, Set, Set, Set) + - Set getLoads() + - Set getLoads(Set, Set) + - Set getPvPlants() + - Set getPvPlants(Set, Set) + - Set getStorages() + - Set getStorages(Set, Set, Set) + - Set getWecPlants() + - Set getWecPlants(Set, Set, Set) +} + +class ThermalSource { + - TypeSource typeSource + - ThermalBusInputFactory thermalBusInputFactory + - CylindricalStorageInputFactory cylindricalStorageInputFactory + - ThermalHouseInputFactory thermalHouseInputFactory + - ThermalSource(TypeSource, DataSource) + - Set getThermalBuses() + - Set getThermalBuses(Set) + - Set getThermalStorages() + - Set getThermalStorages(Set, Set) + - Set 
getThermalHouses() + - Set getThermalHouses(Set, Set) + - Set getCylindricStorages() + - Set getCylindricStorages(Set, Set) +} + +class TypeSource { + - OperatorInputFactory operatorInputFactory + - Transformer2WTypeInputFactory transformer2WTypeInputFactory + - LineTypeInputFactory lineTypeInputFactory + - Transformer3WTypeInputFactory transformer3WTypeInputFactory + - SystemParticipantTypeInputFactory systemParticipantTypeInputFactory + - TypeSource(DataSource) + - Set getTransformer2WTypes() + - Set getTransformer3WTypes() + - Set getOperators() + - Set getLineTypes() + - Set getBmTypes() + - Set getChpTypes() + - Set getHpTypes() + - Set getStorageTypes() + - Set getWecTypes() + - Set getEvTypes() +} +@enduml \ No newline at end of file diff --git a/docs/uml/main/FunctionalDataSourceClassDiagram.puml b/docs/uml/main/FunctionalDataSourceClassDiagram.puml new file mode 100644 index 000000000..5504656ec --- /dev/null +++ b/docs/uml/main/FunctionalDataSourceClassDiagram.puml @@ -0,0 +1,26 @@ +@startuml + +note "Assuming all classes to implement \nthe abstract methods of their interfaces\n\n" as generalNotes + +interface DataSource { + {abstract} Stream> getSourceData(Class entityClass) +} + +DataSource <|-- CsvDataSource +DataSource <|-- SqlDataSource + +' Implementations + +class CsvDataSource { + - String csvSep + # CsvFileConnector connector + + CsvDataSource(String, String, FileNamingStrategy) +} + +class SqlDataSource { + - String schemaName + # SqlConnector connector + # DatabaseNamingStrategy databaseNamingStrategy + + SqlDataSource(String, String, DatabaseNamingStrategy) +} +@enduml \ No newline at end of file diff --git a/docs/uml/main/TimeSeriesSourceClassDiagram.puml b/docs/uml/main/TimeSeriesSourceClassDiagram.puml new file mode 100644 index 000000000..56d66b729 --- /dev/null +++ b/docs/uml/main/TimeSeriesSourceClassDiagram.puml @@ -0,0 +1,73 @@ +@startuml + +note "Assuming all classes to implement \nthe abstract methods of their interfaces\n\n" as 
generalNotes + +abstract class TimeSeriesSource { + - Class valueClass + - TimeBasedSimpleValueFactory valueFactory + - {abstract} IndividualTimeSeries getTimeSeries() + - {abstract} IndividualTimeSeries getTimeSeries(ClosedInterval) + - {abstract} Optional getValue(ZonedDateTime) +} + +TimeSeriesSource <|-- CsvTimeSeriesSource +TimeSeriesSource <|-- SqlTimeSeriesSource + +class CsvTimeSeriesSource { + - IndividualTimeSeries timeSeries + - CsvDataSource dataSource + + {static} CsvTimeSeriesSource getSource(\n\tString,\n\tString,\n\tEntityPersistenceNamingStrategy,\n\tCsvFileConnector.CsvIndividualTimeSeriesMetaInformation) + - CsvTimeSeriesSource(\n\tString,\n\tString,\n\tFileNamingStrategy,\n\tUUID,\n\tString,\n\tClass,\n\tTimeBasedSimpleValueFactory) +} + +class SqlTimeSeriesSource { + - SqlDataSource dataSource + - UUID timeSeriesUuid + - SqlTimeSeriesSource(SqlDataSource, UUID, Class, TimeBasedSimpleValueFactory) + - SqlTimeSeriesSource(SqlConnector, String, DatabaseNamingStrategy, UUID, Class, TimeBasedSimpleValueFactory) + + {static} SqlTimeSeriesSource createSource(SqlConnector, String, DatabaseNamingStrategy, IndividualTimeSeriesMetaInformation, String) + + {static} SqlTimeSeriesSource create(SqlConnector, String, DatabaseNamingStrategy, UUID, Class, String) +} + +abstract class TimeSeriesMappingSource { + - TimeSeriesMappingFactory mappingFactory + - Map getMapping() + - Optional getTimeSeriesUuid(UUID) + {abstract} Stream> getMappingSourceData() +} + +TimeSeriesMappingSource <|-- CsvTimeSeriesMappingSource +TimeSeriesMappingSource <|-- SqlTimeSeriesMappingSource + +class CsvTimeSeriesMappingSource { + - CsvDataSource dataSource + - CsvTimeSeriesMappingSource(String, String, FileNamingStrategy) +} + +class SqlTimeSeriesMappingSource { + - EntityPersistenceNamingStrategy entityPersistenceNamingStrategy + - SqlDataSource dataSource + - SqlTimeSeriesMappingSource(SqlConnector, String, EntityPersistenceNamingStrategy) +} + +interface 
TimeSeriesMetaInformationSource { + - {abstract} Map getTimeSeriesMetaInformation() + - {abstract} Optional getTimeSeriesMetaInformation(UUID) +} + +TimeSeriesMetaInformationSource <|.. CsvTimeSeriesMetaInformationSource +TimeSeriesMetaInformationSource <|.. SqlTimeSeriesMetaInformationSource + +class CsvTimeSeriesMetaInformationSource { + - CsvDataSource dataSource + - Map timeSeriesMetaInformation + - CsvTimeSeriesMetaInformationSource(String, String, FileNamingStrategy) +} + +class SqlTimeSeriesMetaInformationSource { + - DatabaseNamingStrategy namingStrategy + - Map mapping + - SqlDataSource dataSource + - SqlTimeSeriesMetaInformationSource(SqlConnector, String, DatabaseNamingStrategy) +} +@enduml \ No newline at end of file diff --git a/docs/uml/main/WeatherCoordinateSourceClassDiagram.puml b/docs/uml/main/WeatherCoordinateSourceClassDiagram.puml new file mode 100644 index 000000000..60716fb34 --- /dev/null +++ b/docs/uml/main/WeatherCoordinateSourceClassDiagram.puml @@ -0,0 +1,67 @@ +@startuml + +note "Assuming all classes to implement \nthe abstract methods of their interfaces\n\n" as generalNotes + +abstract class WeatherSource { + - TimeBasedWeatherValueFactory weatherFactory + - Map> coordinateToTimeSeries + - IdCoordinateSource idCoordinateSource + - {abstract} Map> getWeather(ClosedInterval) + - {abstract} Map> getWeather(ClosedInterval, Collection) + - {abstract} Optional> getWeather(ZonedDateTime, Point) +} + +WeatherSource <|-- CsvWeatherSource +WeatherSource <|-- SqlWeatherSource +WeatherSource <|-- InfluxDbWeatherSource +WeatherSource <|-- CouchbaseWeatherSource + +class CsvWeatherSource { + - CsvDataSource dataSource + - CsvWeatherSource(String, String, FileNamingStrategy, IdCoordinateSource, TimeBasedWeatherValueFactory) +} +class SqlWeatherSource { + - SqlDataSource dataSource + - String factoryCoordinateFieldName + - SqlWeatherSource(SqlConnector, IdCoordinateSource, String, String, TimeBasedWeatherValueFactory) +} +class InfluxDbWeatherSource 
{ + - InfluxDbConnector connector + - InfluxDbWeatherSource(InfluxDbConnector, IdCoordinateSource, TimeBasedWeatherValueFactory) + - IndividualTimeSeries getWeather(ClosedInterval, Point) +} +class CouchbaseWeatherSource { + - CouchbaseConnector connector + - CouchbaseWeatherSource(CouchbaseConnector, IdCoordinateSource, String, TimeBasedWeatherValueFactory, String) + - CouchbaseWeatherSource(CouchbaseConnector, IdCoordinateSource, String, String, TimeBasedWeatherValueFactory, String) + - String generateWeatherKey(ZonedDateTime, Integer) + - String createQueryStringForIntervalAndCoordinate(ClosedInterval, int) +} + +interface IdCoordinateSource { + {abstract} Optional getCoordinate(int) + {abstract} Collection getCoordinates(int...) + {abstract} Optional getId(Point) + {abstract} Collection getAllCoordinates() + {abstract} List getNearestCoordinates(Point, int) + {abstract} List getClosestCoordinates(Point, int, ComparableQuantity) +} + +IdCoordinateSource <|.. CsvIdCoordinateSource +IdCoordinateSource <|.. 
SqlIdCoordinateSource + +class CsvIdCoordinateSource { + - Map idToCoordinate + - Map coordinateToId + - CsvDataSource dataSource + - IdCoordinateFactory factory + - CsvIdCoordinateSource(IdCoordinateFactory, CsvDataSource) +} + +class SqlIdCoordinateSource { + - SqlDataSource dataSource + - SqlIdCoordinateFactory factory + - SqlIdCoordinateSource(SqlIdCoordinateFactory, String, SqlDataSource) +} + +@enduml \ No newline at end of file diff --git a/gradle/scripts/mavenCentralPublish.gradle b/gradle/scripts/mavenCentralPublish.gradle index c1e1fb120..7b091d412 100644 --- a/gradle/scripts/mavenCentralPublish.gradle +++ b/gradle/scripts/mavenCentralPublish.gradle @@ -73,7 +73,6 @@ if (project.hasProperty('user') && project.hasProperty('password') && project.ha from components.java artifact sourcesJar artifact javadocJar - } } repositories { diff --git a/gradle/scripts/sonarqube.gradle b/gradle/scripts/sonarqube.gradle index 952adc896..1612b3ef5 100644 --- a/gradle/scripts/sonarqube.gradle +++ b/gradle/scripts/sonarqube.gradle @@ -34,8 +34,6 @@ sonarqube { // exclusions property 'sonar.exclusions', ['docs/**'] - - } } diff --git a/input/StandardAssetTypes/line_type_input.csv b/input/StandardAssetTypes/line_type_input.csv new file mode 100644 index 000000000..fe135682f --- /dev/null +++ b/input/StandardAssetTypes/line_type_input.csv @@ -0,0 +1,34 @@ +uuid,b,g,i_max,id,r,v_rated,x +91617ab8-3de2-4fba-be45-a54473ba09a9,3.61283,0.0,1300.0,LineType_1,0.08,380.0,0.32 +b3b231ae-a971-4432-80d7-4ce2f2a56a32,3.22799,0.0,1950.0,LineType_4,0.033333,380.0,0.333333 +24595f91-8295-41f8-a3d8-c9418d860d9c,1.076,0.0,650.0,LineType_6,0.1,380.0,1.0 +f0fc57ec-aa5a-4484-b870-be70a5428cbd,6.45597,0.0,3900.0,LineType_9,0.016667,380.0,0.166667 +ba70d8e7-b082-49bc-8c45-3c10e1236c3e,8.60796,0.0,5200.0,LineType_10,0.0125,380.0,0.125 +eee8eeed-62c9-4345-aa5a-3743fe32007d,12.9119,0.0,7800.0,LineType_11,0.008333,380.0,0.083333 
+d2b16935-dcd7-44d2-8623-cec4c703ccdc,17.2159,0.0,10400.0,LineType_12,0.00625,380.0,0.0625 +a490c96e-6e90-485a-b0d7-adeb81fa09cd,4.30398,0.0,2600.0,LineType_2,0.025,220.0,0.25 +5272bcbc-7d0e-4759-85fa-27943fd8d19c,2.15199,0.0,1300.0,LineType_3,0.05,220.0,0.5 +dd0bac07-de8d-4608-af36-b8ff2819f55a,7.22566,0.0,2600.0,LineType_5,0.04,220.0,0.16 +64c1dcb5-57a5-4f35-b2bf-9ae4e6cc4943,1.80642,0.0,650.0,LineType_7,0.16,220.0,0.64 +bdc83a85-c796-4bcb-8b79-8988dc2804f8,5.41925,0.0,1950.0,LineType_8,0.053333,220.0,0.213333 +3d75fb6b-f0be-4451-ab4c-7f00c0ebd619,2.8274,0.0,680.0,Al/St_265/35,0.1095,110.0,0.296 +f5dcaf44-7a9a-4b85-89ba-5c15c04c5766,3.45575,0.0,105.0,15-AL1/3-ST1A 20.0,1.8769,20.0,0.409 +9cbf484b-7256-4e7a-9c35-3e1049909aa0,3.53429,0.0,140.0,24-AL1/4-ST1A 20.0,1.2012,20.0,0.394 +5b542a50-b0c2-4497-ba90-b2b31aafaa0b,2.87456,0.0,170.0,34-AL1/6-ST1A 20.0,0.8342,20.0,0.382 +d594cd67-4459-44bc-9594-db710372db71,2.98451,0.0,210.0,48-AL1/8-ST1A 20.0,0.5939,20.0,0.372 +305e60ad-cfd2-4127-9d83-8d9b21942d93,3.04734,0.0,290.0,70-AL1/11-ST1A 20.0,0.4132,20.0,0.36 +cc59abd4-770b-45d2-98c8-919c91f1ca4b,58.7478,0.0,652.0,1x630_RM/50,0.122,110.0,0.122522 +82ea1b98-2b21-48bd-841a-8d17d8ac20c9,59.3761,0.0,158.0,NA2XS2Y 1x50 RM/25 12/20 kV,0.64,20.0,0.145 +4adef9e6-5e40-416d-8bd2-b6768d156c54,59.6903,0.0,220.0,NA2XS2Y 1x70 RM/25 12/20 kV,0.443,20.0,0.132 +d5c03484-59c2-44d5-a2ee-63a5a0d623b4,67.8584,0.0,252.0,NA2XS2Y 1x95 RM/25 12/20 kV,0.313,20.0,0.132 +9c13909d-1dd1-4e2d-980b-55345bdf0fd0,72.2566,0.0,283.0,NA2XS2Y 1x120 RM/25 12/20 kV,0.253,20.0,0.119 +36243493-eb31-4e81-bd13-b54ef59c4cbe,78.5398,0.0,319.0,NA2XS2Y 1x150 RM/25 12/20 kV,0.206,20.0,0.116 +437689f8-366d-4b04-b42d-d7a754db074b,85.7655,0.0,362.0,NA2XS2Y 1x185 RM/25 12/20 kV,0.161,20.0,0.117 +b459115d-d4eb-47d4-b7ec-625339ee0dcc,95.5044,0.0,421.0,NA2XS2Y 1x240 RM/25 12/20 kV,0.122,20.0,0.112 +9aed5818-c037-4033-8d15-806c62d70b8f,113.097,0.0,315.0,NA2XS2Y 1x150 RM/25 6/10 kV,0.206,10.0,0.11 
+60d37bc7-157a-4c32-b1b5-e74c10d70531,127.549,0.0,358.0,NA2XS2Y 1x185 RM/25 6/10 kV,0.161,10.0,0.11 +a3ced617-2ffd-4593-b8e9-bcad9a521aab,143.257,0.0,416.0,NA2XS2Y 1x240 RM/25 6/10 kV,0.122,10.0,0.105 +f0484bb6-9d0d-4d13-bfbe-b83783b8352a,150.796,0.0,471.0,NA2XS2Y 1x300 RM/25 6/10 kV,0.1,10.0,0.0974 +6b223bc3-69e2-4eb8-a2c0-76be1cd2c998,169.646,0.0,535.0,NA2XS2Y 1x400 RM/25 6/10 kV,0.078,10.0,0.0942 +65181464-230a-487b-978f-81e406e9eb22,260.752,0.0,270.0,NAYY 4x150SE 0.6/1kV,0.2067,0.4,0.0804248 +1200d9eb-6d10-47f3-8543-abea43b128d3,273.319,0.0,357.0,NAYY 4x240SE 0.6/1kV,0.1267,0.4,0.0797965 diff --git a/input/StandardAssetTypes/transformer_2_w_type_input.csv b/input/StandardAssetTypes/transformer_2_w_type_input.csv new file mode 100644 index 000000000..4b17a851f --- /dev/null +++ b/input/StandardAssetTypes/transformer_2_w_type_input.csv @@ -0,0 +1,12 @@ +uuid,b_m,d_phi,d_v,g_m,id,r_sc,s_rated,tap_max,tap_min,tap_neutr,tap_side,v_rated_a,v_rated_b,x_sc +5a890aae-b9c9-4ebf-8a49-8850ae9df402,219.43184927638458,0.0,1.0,1731.3019390581715,Typ_x_380/220,0.6016666666666666,600000.0,16,-16,0,false,380.0,220.0,44.51926783240413 +03159c0d-126e-47cc-9871-066870df3a3f,1193.4686938790917,0.0,1.0,831.0249307479223,350MVA_380/110,1.0608979591836734,350000.0,16,-16,0,false,380.0,110.0,90.75951402093402 +7cb289cb-e6af-4470-9c68-e5a91978a5e7,2013.800484464662,0.0,1.0,1446.280991735537,300MVA_220/110,0.20704444444444442,300000.0,16,-16,0,false,220.0,110.0,19.358892855688435 +73644bc6-78cf-4882-9837-e6508cab092d,867.7685950413226,0.0,1.5,1157.0247933884295,25 MVA 110/20 kV YNd5,1.9843999999999997,25000.0,9,-9,0,false,110.0,20.0,58.04608993412045 +6935ae26-374a-4c24-aeee-6d5760d6ddf3,720.4791642215993,0.0,1.5,1487.603305785124,40 MVA 110/20 kV YNd5,1.0285,40000.0,9,-9,0,false,110.0,20.0,48.994205909984906 +b49db20f-b8b5-4265-8318-f669b9d121e9,1015.6886939330394,0.0,1.5,1818.181818181818,63 MVA 110/10 kV YNd5,0.6146031746031745,63000.0,9,-9,0,false,110.0,10.0,34.56596500037509 
+0843b836-cee4-4a8c-81a4-098400fe91cf,24.495101551166183,0.0,2.5,2999.9999999999995,0.4 MVA 20/0.4 kV Dyn5 ASEA,11.999999999999998,400.0,2,-2,0,false,20.0,0.4,58.787753826796276 +a8f3aeea-ef4d-4f3c-bb07-09a0a86766c1,9.591746452043322,0.0,2.5,1149.9999999999998,0.16 MVA 20/0.4 kV DOTE 160/20 SGB,36.71874999999999,160.0,2,-2,0,false,20.0,0.4,93.01469452961452 +0644c120-a247-425f-bbe4-31b153f7f440,16.583241729259253,0.0,2.5,2199.9999999999995,0.25 MVA 20/0.4 kV Dyn5 ASEA,21.119999999999997,250.0,2,-2,0,false,20.0,0.4,93.6479876986153 +bdf22ee4-deba-41f4-a187-ae00638a6880,36.47380569074435,0.0,2.5,4125.0,0.63 MVA 20/0.4 kV Dyn5 ASEA,6.953892668178382,630.0,2,-2,0,false,20.0,0.4,37.45518044666632 +a0cbd90a-4e9f-47db-8dca-041d3a288f77,145.8952227629774,0.0,2.5,16500.0,0.63 MVA 10/0.4 kV Dyn5 ASEA,1.7384731670445954,630.0,2,-2,0,false,10.0,0.4,9.36379511166658 diff --git a/src/main/java/edu/ie3/datamodel/exceptions/EntityProcessorException.java b/src/main/java/edu/ie3/datamodel/exceptions/EntityProcessorException.java index f81118f49..37736b231 100644 --- a/src/main/java/edu/ie3/datamodel/exceptions/EntityProcessorException.java +++ b/src/main/java/edu/ie3/datamodel/exceptions/EntityProcessorException.java @@ -9,7 +9,7 @@ * Is thrown, when an something went wrong during entity field mapping creation in a {@link * edu.ie3.datamodel.io.processor.EntityProcessor} */ -public class EntityProcessorException extends RuntimeException { +public class EntityProcessorException extends Exception { public EntityProcessorException(final String message, final Throwable cause) { super(message, cause); } diff --git a/src/main/java/edu/ie3/datamodel/exceptions/FailedValidationException.java b/src/main/java/edu/ie3/datamodel/exceptions/FailedValidationException.java new file mode 100644 index 000000000..0fc60cd13 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/exceptions/FailedValidationException.java @@ -0,0 +1,25 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.exceptions; + +import edu.ie3.datamodel.utils.ExceptionUtils; +import java.util.List; + +public class FailedValidationException extends ValidationException { + public FailedValidationException(String message, Throwable throwable) { + super(message, throwable); + } + + public FailedValidationException(String message) { + super(message); + } + + /** @param exceptions List of exceptions, which must not be empty */ + public FailedValidationException(List exceptions) { + super( + "Validation failed due to: \n" + ExceptionUtils.getMessages(exceptions), exceptions.get(0)); + } +} diff --git a/src/main/java/edu/ie3/datamodel/exceptions/FailureException.java b/src/main/java/edu/ie3/datamodel/exceptions/FailureException.java new file mode 100644 index 000000000..7c1f257a2 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/exceptions/FailureException.java @@ -0,0 +1,20 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.exceptions; + +public class FailureException extends Exception { + public FailureException(String message, Throwable throwable) { + super(message, throwable); + } + + public FailureException(String message) { + super(message); + } + + public FailureException(Throwable throwable) { + super(throwable); + } +} diff --git a/src/main/java/edu/ie3/datamodel/exceptions/GraphicSourceException.java b/src/main/java/edu/ie3/datamodel/exceptions/GraphicSourceException.java new file mode 100644 index 000000000..91d6958c7 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/exceptions/GraphicSourceException.java @@ -0,0 +1,14 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.exceptions; + +import java.util.List; + +public class GraphicSourceException extends SourceException { + public GraphicSourceException(String message, List exceptions) { + super(message, exceptions); + } +} diff --git a/src/main/java/edu/ie3/datamodel/exceptions/InvalidEntityException.java b/src/main/java/edu/ie3/datamodel/exceptions/InvalidEntityException.java index 162acd0b1..744e08c25 100644 --- a/src/main/java/edu/ie3/datamodel/exceptions/InvalidEntityException.java +++ b/src/main/java/edu/ie3/datamodel/exceptions/InvalidEntityException.java @@ -13,12 +13,13 @@ public class InvalidEntityException extends ValidationException { private static final long serialVersionUID = 809496087520306374L; public InvalidEntityException(String faultDescription, UniqueEntity invalidEntity) { - super("Entity is invalid because of: " + faultDescription + " [" + invalidEntity + "]"); + super("Entity is invalid because of: \n" + faultDescription + " [" + invalidEntity + "]"); } public InvalidEntityException( String faultDescription, Throwable cause, UniqueEntity invalidEntity) { - super("Entity is invalid because of: " + faultDescription + " [" + invalidEntity + "]", cause); + super( + "Entity is invalid because of: \n" + faultDescription + " [" + invalidEntity + "]", cause); } public InvalidEntityException(String message, Throwable cause) { diff --git a/src/main/java/edu/ie3/datamodel/exceptions/RawGridException.java b/src/main/java/edu/ie3/datamodel/exceptions/RawGridException.java new file mode 100644 index 000000000..629232220 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/exceptions/RawGridException.java @@ -0,0 +1,14 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.exceptions; + +import java.util.List; + +public class RawGridException extends SourceException { + public RawGridException(String message, List exceptions) { + super(message, exceptions); + } +} diff --git a/src/main/java/edu/ie3/datamodel/exceptions/SourceException.java b/src/main/java/edu/ie3/datamodel/exceptions/SourceException.java index 65d2f1cff..2a5a19030 100644 --- a/src/main/java/edu/ie3/datamodel/exceptions/SourceException.java +++ b/src/main/java/edu/ie3/datamodel/exceptions/SourceException.java @@ -5,6 +5,9 @@ */ package edu.ie3.datamodel.exceptions; +import edu.ie3.datamodel.utils.ExceptionUtils; +import java.util.List; + /** * Exception that should be used whenever an error occurs in a instance of a {@link * edu.ie3.datamodel.io.source.DataSource} @@ -27,4 +30,8 @@ public SourceException(final Throwable cause) { public SourceException(final String message) { super(message); } + + public SourceException(String message, List exceptions) { + super(message + " " + ExceptionUtils.getMessages(exceptions), exceptions.get(0)); + } } diff --git a/src/main/java/edu/ie3/datamodel/exceptions/SystemParticipantsException.java b/src/main/java/edu/ie3/datamodel/exceptions/SystemParticipantsException.java new file mode 100644 index 000000000..7c97dadf7 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/exceptions/SystemParticipantsException.java @@ -0,0 +1,14 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.exceptions; + +import java.util.List; + +public class SystemParticipantsException extends SourceException { + public SystemParticipantsException(String message, List exceptions) { + super(message, exceptions); + } +} diff --git a/src/main/java/edu/ie3/datamodel/exceptions/TryException.java b/src/main/java/edu/ie3/datamodel/exceptions/TryException.java new file mode 100644 index 000000000..61d134d0f --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/exceptions/TryException.java @@ -0,0 +1,12 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.exceptions; + +public class TryException extends RuntimeException { + public TryException(String message, Throwable throwable) { + super(message, throwable); + } +} diff --git a/src/main/java/edu/ie3/datamodel/exceptions/ValidationException.java b/src/main/java/edu/ie3/datamodel/exceptions/ValidationException.java index 080602f9c..e74d70696 100644 --- a/src/main/java/edu/ie3/datamodel/exceptions/ValidationException.java +++ b/src/main/java/edu/ie3/datamodel/exceptions/ValidationException.java @@ -5,7 +5,7 @@ */ package edu.ie3.datamodel.exceptions; -public abstract class ValidationException extends RuntimeException { +public abstract class ValidationException extends Exception { protected ValidationException(String s) { super(s); } diff --git a/src/main/java/edu/ie3/datamodel/graph/ImpedanceWeightedGraph.java b/src/main/java/edu/ie3/datamodel/graph/ImpedanceWeightedGraph.java index 267ed5603..14e630dfa 100644 --- a/src/main/java/edu/ie3/datamodel/graph/ImpedanceWeightedGraph.java +++ b/src/main/java/edu/ie3/datamodel/graph/ImpedanceWeightedGraph.java @@ -39,21 +39,4 @@ public void 
setEdgeWeightQuantity( weight.to(ImpedanceWeightedEdge.DEFAULT_IMPEDANCE_UNIT).getValue().doubleValue(); super.setEdgeWeight(edge, weightDouble); } - - /** - * The only purpose for overriding this method is to provide a better indication of the unit that - * is expected to be passed in. It is highly advised to use the {@link - * ImpedanceWeightedGraph#setEdgeWeightQuantity(ImpedanceWeightedEdge, ComparableQuantity)} for - * safety purposes that the provided edge weight is correct. - * - * @param edge the edge whose weight should be altered - * @param impedanceInOhm the weight of the {@link ImpedanceWeightedEdge} in ohm - * @deprecated Use {@link ImpedanceWeightedGraph#setEdgeWeightQuantity(ImpedanceWeightedEdge, - * ComparableQuantity)} instead, as it provides means for proper unit handling - */ - @Override - @Deprecated(since = "Deprecated since 2.1.0. See Javadocs for more information.") - public void setEdgeWeight(ImpedanceWeightedEdge edge, double impedanceInOhm) { - super.setEdgeWeight(edge, impedanceInOhm); - } } diff --git a/src/main/java/edu/ie3/datamodel/graph/SubGridGate.java b/src/main/java/edu/ie3/datamodel/graph/SubGridGate.java index ab822182e..53b22a205 100644 --- a/src/main/java/edu/ie3/datamodel/graph/SubGridGate.java +++ b/src/main/java/edu/ie3/datamodel/graph/SubGridGate.java @@ -53,24 +53,6 @@ public static SubGridGate fromTransformer3W( }; } - /** @deprecated since 3.0. Use {@link #link()} instead */ - @Deprecated(since = "3.0") - public TransformerInput getLink() { - return link; - } - - /** @deprecated since 3.0. Use {@link #superiorNode()} instead */ - @Deprecated(since = "3.0") - public NodeInput getSuperiorNode() { - return superiorNode; - } - - /** @deprecated since 3.0. 
Use {@link #inferiorNode()} instead */ - @Deprecated(since = "3.0") - public NodeInput getInferiorNode() { - return inferiorNode; - } - public int getSuperiorSubGrid() { return superiorNode.getSubnet(); } diff --git a/src/main/java/edu/ie3/datamodel/io/IoUtil.java b/src/main/java/edu/ie3/datamodel/io/IoUtil.java index ddda0c8fa..0968b4f28 100644 --- a/src/main/java/edu/ie3/datamodel/io/IoUtil.java +++ b/src/main/java/edu/ie3/datamodel/io/IoUtil.java @@ -6,6 +6,8 @@ package edu.ie3.datamodel.io; import java.io.File; +import java.nio.file.Path; +import java.util.Optional; public class IoUtil { public static final String FILE_SEPARATOR_REGEX = "[\\\\/]"; @@ -17,8 +19,8 @@ private IoUtil() { } /** - * Ensure to have harmonized file separator across the whole String. Will replace all occurences - * if "\" and "/" by the systems file separator + * Ensure to have harmonized file separator across the whole String. Will replace all occurrences + * of "\" and "/" by the systems file separator. * * @param in The String to harmonize * @return The harmonized String @@ -26,4 +28,25 @@ private IoUtil() { public static String harmonizeFileSeparator(String in) { return in.replaceAll(FILE_SEPARATOR_REGEX, FILE_SEPARATOR_REPLACEMENT); } + + /** + * Ensure to have harmonized file separator across the whole path. Will replace all occurrences * + * of "\" and "/" by the systems file separator. + * + * @param path the path to harmonize + * @return the harmonized path + */ + public static Path harmonizeFileSeparator(Path path) { + return Path.of(IoUtil.harmonizeFileSeparator(path.toString())); + } + + /** + * Method to wrap a string of a path in an option for a path. 
+ * + * @param in string of the path + * @return option of the path + */ + public static Optional pathOption(String in) { + return Optional.of(Path.of(in)); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java index 8d8e07f30..71147a7db 100644 --- a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java +++ b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java @@ -20,12 +20,10 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.commons.io.FilenameUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,17 +40,13 @@ public class CsvFileConnector implements DataConnector { private final Map, BufferedCsvWriter> entityWriters = new HashMap<>(); private final Map timeSeriesWriters = new HashMap<>(); - // ATTENTION: Do not finalize. It's meant for lazy evaluation. 
- @Deprecated(since = "3.0", forRemoval = true) - private Map - individualTimeSeriesMetaInformation; private final FileNamingStrategy fileNamingStrategy; - private final String baseDirectoryName; + private final Path baseDirectoryName; private static final String FILE_ENDING = ".csv"; - public CsvFileConnector(String baseDirectoryName, FileNamingStrategy fileNamingStrategy) { + public CsvFileConnector(Path baseDirectoryName, FileNamingStrategy fileNamingStrategy) { this.baseDirectoryName = baseDirectoryName; this.fileNamingStrategy = fileNamingStrategy; } @@ -106,34 +100,25 @@ BufferedCsvWriter getOrInitWriter(T timeSeries, String[] headerElements, String * @throws ConnectorException If the base folder is a file * @throws IOException If the writer cannot be initialized correctly */ - private BufferedCsvWriter initWriter(String baseDirectory, CsvFileDefinition fileDefinition) + private BufferedCsvWriter initWriter(Path baseDirectory, CsvFileDefinition fileDefinition) throws ConnectorException, IOException { /* Join the full DIRECTORY path (excluding file name) */ - String baseDirectoryHarmonized = IoUtil.harmonizeFileSeparator(baseDirectory); - String fullDirectoryPath = - FilenameUtils.concat(baseDirectoryHarmonized, fileDefinition.directoryPath()); - String fullPath = FilenameUtils.concat(baseDirectoryHarmonized, fileDefinition.getFilePath()); + Path baseDirectoryHarmonized = IoUtil.harmonizeFileSeparator(baseDirectory); + Path fullDirectoryPath = baseDirectoryHarmonized.resolve(fileDefinition.getDirectoryPath()); + Path fullPath = baseDirectoryHarmonized.resolve(fileDefinition.getFilePath()); /* Create missing directories */ - File directories = new File(fullDirectoryPath); + File directories = fullDirectoryPath.toFile(); if (directories.isFile()) throw new ConnectorException("Directory '" + directories + "' already exists and is a file!"); if (!directories.exists() && !directories.mkdirs()) throw new IOException("Unable to create directory tree '" + directories 
+ "'"); - File pathFile = new File(fullPath); - boolean append = pathFile.exists(); BufferedCsvWriter writer = new BufferedCsvWriter( - fullPath, fileDefinition.headLineElements(), fileDefinition.csvSep(), append); - if (!append) { - writer.writeFileHeader(); - } else { - log.warn( - "File '{}' already exist. Will append new content WITHOUT new header! Full path: {}", - fileDefinition.fileName(), - pathFile.getAbsolutePath()); - } + fullPath, fileDefinition.headLineElements(), fileDefinition.csvSep(), false); + writer.writeFileHeader(); + return writer; } @@ -181,10 +166,10 @@ public synchronized void closeEntityWriter(Class clz * @return the reader that contains information about the file to be read in * @throws FileNotFoundException If the matching file cannot be found */ - public BufferedReader initReader(Class clz) throws FileNotFoundException { - String filePath = null; + public BufferedReader initReader(Class clz) + throws FileNotFoundException, ConnectorException { try { - filePath = + Path filePath = fileNamingStrategy .getFilePath(clz) .orElseThrow( @@ -193,13 +178,11 @@ public BufferedReader initReader(Class clz) throws FileN "Cannot find a naming strategy for class '" + clz.getSimpleName() + "'.")); + return initReader(filePath); } catch (ConnectorException e) { - log.error( - "Cannot get reader for entity '{}' as no file naming strategy for this file exists. 
Exception: {}", - clz.getSimpleName(), - e); + throw new ConnectorException( + "Cannot initialize reader for entity '" + clz.getSimpleName() + "'.", e); } - return initReader(filePath); } /** @@ -210,33 +193,12 @@ public BufferedReader initReader(Class clz) throws FileN * @return the reader that contains information about the file to be read in * @throws FileNotFoundException if no file with the provided file name can be found */ - public BufferedReader initReader(String filePath) throws FileNotFoundException { - File fullPath = new File(baseDirectoryName + File.separator + filePath + FILE_ENDING); + public BufferedReader initReader(Path filePath) throws FileNotFoundException { + File fullPath = baseDirectoryName.resolve(filePath.toString() + FILE_ENDING).toFile(); return new BufferedReader( new InputStreamReader(new FileInputStream(fullPath), StandardCharsets.UTF_8), 16384); } - /** - * Get time series meta information for a given uuid. - * - *

This method lazily evaluates the mapping from all time series files to their meta - * information. - * - * @param timeSeriesUuid The time series in question - * @return An option on the queried information - * @deprecated since 3.0. Use {@link #getCsvIndividualTimeSeriesMetaInformation(ColumnScheme...)} - * instead - */ - @Deprecated(since = "3.0", forRemoval = true) - public Optional - getIndividualTimeSeriesMetaInformation(UUID timeSeriesUuid) { - if (Objects.isNull(individualTimeSeriesMetaInformation)) - individualTimeSeriesMetaInformation = getCsvIndividualTimeSeriesMetaInformation(); - - return Optional.ofNullable(individualTimeSeriesMetaInformation.get(timeSeriesUuid)) - .map(edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation::new); - } - /** * Receive the information for specific time series. They are given back filtered by the column * scheme in order to allow for accounting the different content types. @@ -245,16 +207,16 @@ public BufferedReader initReader(String filePath) throws FileNotFoundException { * possible readers will be initialized. * @return A mapping from column scheme to the individual time series meta information */ - public Map + public Map getCsvIndividualTimeSeriesMetaInformation(final ColumnScheme... 
columnSchemes) { return getIndividualTimeSeriesFilePaths().parallelStream() .map( filePath -> { /* Extract meta information from file path and enhance it with the file path itself */ IndividualTimeSeriesMetaInformation metaInformation = - fileNamingStrategy.individualTimeSeriesMetaInformation(filePath); - return new edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation( - metaInformation, FileNamingStrategy.removeFileNameEnding(filePath)); + fileNamingStrategy.individualTimeSeriesMetaInformation(filePath.toString()); + return new CsvIndividualTimeSeriesMetaInformation( + metaInformation, FileNamingStrategy.removeFileNameEnding(filePath.getFileName())); }) .filter( metaInformation -> @@ -271,23 +233,20 @@ public BufferedReader initReader(String filePath) throws FileNotFoundException { * * @return A set of relative paths to time series files, with respect to the base folder path */ - private Set getIndividualTimeSeriesFilePaths() { - Path baseDirectoryPath = - Paths.get( - FilenameUtils.getFullPath(baseDirectoryName) - + FilenameUtils.getName(baseDirectoryName)); + private Set getIndividualTimeSeriesFilePaths() { + Path baseDirectoryPath = baseDirectoryName.resolve(baseDirectoryName); try (Stream pathStream = Files.walk(baseDirectoryPath)) { return pathStream .map(baseDirectoryPath::relativize) .filter( path -> { - String withoutEnding = FileNamingStrategy.removeFileNameEnding(path.toString()); + Path withoutEnding = + Path.of(FileNamingStrategy.removeFileNameEnding(path.toString())); return fileNamingStrategy .getIndividualTimeSeriesPattern() - .matcher(withoutEnding) + .matcher(withoutEnding.toString()) .matches(); }) - .map(Path::toString) .collect(Collectors.toSet()); } catch (IOException e) { log.error("Unable to determine time series files readers for time series.", e); @@ -303,7 +262,7 @@ private Set getIndividualTimeSeriesFilePaths() { * @throws FileNotFoundException If the file is not present */ public BufferedReader initIdCoordinateReader() throws 
FileNotFoundException { - String filePath = fileNamingStrategy.getIdCoordinateEntityName(); + Path filePath = Path.of(fileNamingStrategy.getIdCoordinateEntityName()); return initReader(filePath); } @@ -319,7 +278,7 @@ public BufferedReader initIdCoordinateReader() throws FileNotFoundException { private , E extends TimeSeriesEntry, V extends Value> CsvFileDefinition buildFileDefinition(T timeSeries, String[] headLineElements, String csvSep) throws ConnectorException { - String directoryPath = fileNamingStrategy.getDirectoryPath(timeSeries).orElse(""); + Path directoryPath = fileNamingStrategy.getDirectoryPath(timeSeries).orElse(Path.of("")); String fileName = fileNamingStrategy .getEntityName(timeSeries) @@ -342,7 +301,7 @@ CsvFileDefinition buildFileDefinition(T timeSeries, String[] headLineElements, S private CsvFileDefinition buildFileDefinition( Class clz, String[] headLineElements, String csvSep) throws ConnectorException { - String directoryPath = fileNamingStrategy.getDirectoryPath(clz).orElse(""); + Path directoryPath = fileNamingStrategy.getDirectoryPath(clz).orElse(Path.of("")); String fileName = fileNamingStrategy .getEntityName(clz) @@ -366,60 +325,4 @@ public void shutdown() { } }); } - - /** - * Enhancing the {@link IndividualTimeSeriesMetaInformation} with the full path to csv file - * - * @deprecated since 3.0. 
Use {@link - * edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation} instead - */ - @Deprecated(since = "3.0", forRemoval = true) - public static class CsvIndividualTimeSeriesMetaInformation - extends edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation { - private final String fullFilePath; - - public CsvIndividualTimeSeriesMetaInformation( - UUID uuid, - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme columnScheme, - String fullFilePath) { - super(uuid, columnScheme); - this.fullFilePath = fullFilePath; - } - - public CsvIndividualTimeSeriesMetaInformation( - edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation metaInformation, - String fullFilePath) { - this(metaInformation.getUuid(), metaInformation.getColumnScheme(), fullFilePath); - } - - public String getFullFilePath() { - return fullFilePath; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof CsvIndividualTimeSeriesMetaInformation that)) return false; - if (!super.equals(o)) return false; - return fullFilePath.equals(that.fullFilePath); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), fullFilePath); - } - - @Override - public String toString() { - return "CsvIndividualTimeSeriesMetaInformation{" - + "uuid=" - + getUuid() - + ", columnScheme=" - + getColumnScheme() - + ", fullFilePath='" - + fullFilePath - + '\'' - + '}'; - } - } } diff --git a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java index d091d310b..b62170ac8 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java @@ -9,6 +9,7 @@ import edu.ie3.util.StringUtils; import java.io.*; import java.nio.charset.StandardCharsets; +import java.nio.file.Path; import java.util.Arrays; import java.util.Map; import java.util.Objects; @@ -39,10 +40,11 @@ public 
class BufferedCsvWriter extends BufferedWriter { * if no file exists, a new one will be created in both cases * @throws IOException If the FileOutputStream cannot be established. */ - public BufferedCsvWriter( - String filePath, String[] headLineElements, String csvSep, boolean append) + public BufferedCsvWriter(Path filePath, String[] headLineElements, String csvSep, boolean append) throws IOException { - super(new OutputStreamWriter(new FileOutputStream(filePath, append), StandardCharsets.UTF_8)); + super( + new OutputStreamWriter( + new FileOutputStream(filePath.toFile(), append), StandardCharsets.UTF_8)); this.headLineElements = headLineElements; this.csvSep = csvSep; } @@ -59,10 +61,10 @@ public BufferedCsvWriter( * if no file exists, a new one will be created in both cases * @throws IOException If the FileOutputStream cannot be established. */ - public BufferedCsvWriter(String baseFolder, CsvFileDefinition fileDefinition, boolean append) + public BufferedCsvWriter(Path baseFolder, CsvFileDefinition fileDefinition, boolean append) throws IOException { this( - baseFolder + File.separator + fileDefinition.getFilePath(), + baseFolder.resolve(fileDefinition.getFilePath()), fileDefinition.headLineElements(), fileDefinition.csvSep(), append); diff --git a/src/main/java/edu/ie3/datamodel/io/csv/CsvFileDefinition.java b/src/main/java/edu/ie3/datamodel/io/csv/CsvFileDefinition.java index c24bac240..6e2429a45 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/CsvFileDefinition.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/CsvFileDefinition.java @@ -5,90 +5,36 @@ */ package edu.ie3.datamodel.io.csv; -import edu.ie3.datamodel.io.IoUtil; +import edu.ie3.datamodel.utils.FileUtils; +import java.nio.file.Path; import java.util.Arrays; import java.util.Objects; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.apache.commons.io.FilenameUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public record CsvFileDefinition( - String 
fileName, String directoryPath, String[] headLineElements, String csvSep) { - private static final Logger logger = LoggerFactory.getLogger(CsvFileDefinition.class); - - private static final Pattern FILE_NAME_PATTERN = - Pattern.compile( - "^(?[^\\\\/\\s.]{0,255})(?:\\.(?[a-zA-Z0-9]{0,10}(?:\\.[a-zA-Z0-9]{0,10})?))?$"); - - private static final String FILE_EXTENSION = "csv"; +/** + * A definition of a csv file. + * + * @param filePath the path of the csv file (including filename and relative path) + * @param headLineElements elements of the headline of the defined file + * @param csvSep the separator that is used in this csv file + */ +public record CsvFileDefinition(Path filePath, String[] headLineElements, String csvSep) { public CsvFileDefinition( - String fileName, String directoryPath, String[] headLineElements, String csvSep) { - /* Remove all file separators at the beginning and end of a directory path and ensure harmonized file separator */ - this.directoryPath = - Objects.nonNull(directoryPath) - ? IoUtil.harmonizeFileSeparator( - directoryPath - .replaceFirst("^" + IoUtil.FILE_SEPARATOR_REGEX, "") - .replaceAll(IoUtil.FILE_SEPARATOR_REGEX + "$", "")) - : ""; - - /* Check the given information of the file name */ - Matcher matcher = FILE_NAME_PATTERN.matcher(fileName); - if (matcher.matches()) { - String extension = matcher.group("extension"); - if (Objects.nonNull(extension) && !extension.equalsIgnoreCase(FILE_EXTENSION)) - logger.warn( - "You provided a file name with extension '{}'. It will be overridden to '{}'.", - extension, - FILE_EXTENSION); - this.fileName = matcher.group("fileName") + "." + FILE_EXTENSION; - } else { - throw new IllegalArgumentException( - "The file name '" - + fileName - + "' is no valid file name. It may contain everything, except '/', '\\', '.' and any white space character."); - } - - this.headLineElements = headLineElements; - this.csvSep = csvSep; - } - - /** @deprecated since 3.0. 
Use {@link #directoryPath()} instead */ - @Deprecated(since = "3.0") - public String getDirectoryPath() { - return directoryPath; - } - - /** - * @return The file name including extension - * @deprecated since 3.0. Use {@link #fileName()} instead - */ - @Deprecated(since = "3.0") - public String getFileName() { - return fileName; + String fileName, Path directoryPath, String[] headLineElements, String csvSep) { + this(FileUtils.ofCsv(fileName, directoryPath), headLineElements, csvSep); } /** * @return The path to the file relative to a not explicitly defined base directory, including the * file extension */ - public String getFilePath() { - return !directoryPath.isEmpty() ? FilenameUtils.concat(directoryPath, fileName) : fileName; + public Path getFilePath() { + return filePath; } - /** @deprecated since 3.0. Use {@link #headLineElements()} instead */ - @Deprecated(since = "3.0") - public String[] getHeadLineElements() { - return headLineElements; - } - - /** @deprecated since 3.0. Use {@link #csvSep()} instead */ - @Deprecated(since = "3.0") - public String getCsvSep() { - return csvSep; + /** Returns the directory path of this file. */ + public Path getDirectoryPath() { + Path parent = filePath.getParent(); + return parent != null ? 
parent : Path.of(""); } @Override @@ -97,15 +43,14 @@ public boolean equals(Object o) { // records' equals method and array fields don't play together nicely if (this == o) return true; if (!(o instanceof CsvFileDefinition that)) return false; - return directoryPath.equals(that.directoryPath) - && fileName.equals(that.fileName) + return filePath.equals(that.filePath) && Arrays.equals(headLineElements, that.headLineElements) && csvSep.equals(that.csvSep); } @Override public int hashCode() { - int result = Objects.hash(directoryPath, fileName, csvSep); + int result = Objects.hash(filePath, csvSep); result = 31 * result + Arrays.hashCode(headLineElements); return result; } @@ -113,11 +58,8 @@ public int hashCode() { @Override public String toString() { return "CsvFileDefinition{" - + "directoryPath='" - + directoryPath - + '\'' - + ", fileName='" - + fileName + + "fullPath='" + + filePath + '\'' + ", headLineElements=" + Arrays.toString(headLineElements) diff --git a/src/main/java/edu/ie3/datamodel/io/csv/CsvIndividualTimeSeriesMetaInformation.java b/src/main/java/edu/ie3/datamodel/io/csv/CsvIndividualTimeSeriesMetaInformation.java index 078071503..49a5630d6 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/CsvIndividualTimeSeriesMetaInformation.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/CsvIndividualTimeSeriesMetaInformation.java @@ -7,25 +7,26 @@ import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; +import java.nio.file.Path; import java.util.Objects; import java.util.UUID; /** Enhancing the {@link IndividualTimeSeriesMetaInformation} with the full path to csv file */ public class CsvIndividualTimeSeriesMetaInformation extends IndividualTimeSeriesMetaInformation { - private final String fullFilePath; + private final Path fullFilePath; public CsvIndividualTimeSeriesMetaInformation( - UUID uuid, ColumnScheme columnScheme, String fullFilePath) { + UUID uuid, ColumnScheme 
columnScheme, Path fullFilePath) { super(uuid, columnScheme); this.fullFilePath = fullFilePath; } public CsvIndividualTimeSeriesMetaInformation( - IndividualTimeSeriesMetaInformation metaInformation, String fullFilePath) { + IndividualTimeSeriesMetaInformation metaInformation, Path fullFilePath) { this(metaInformation.getUuid(), metaInformation.getColumnScheme(), fullFilePath); } - public String getFullFilePath() { + public Path getFullFilePath() { return fullFilePath; } diff --git a/src/main/java/edu/ie3/datamodel/io/csv/FileNameMetaInformation.java b/src/main/java/edu/ie3/datamodel/io/csv/FileNameMetaInformation.java deleted file mode 100644 index f60770230..000000000 --- a/src/main/java/edu/ie3/datamodel/io/csv/FileNameMetaInformation.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.csv; - -import java.util.Objects; -import java.util.UUID; - -/** - * Meta information, that can be derived from a certain file name - * - * @deprecated since 3.0. 
Use {@link edu.ie3.datamodel.io.naming.TimeSeriesMetaInformation} instead - */ -@Deprecated(since = "3.0", forRemoval = true) -public abstract class FileNameMetaInformation { - private final UUID uuid; - - protected FileNameMetaInformation(UUID uuid) { - this.uuid = uuid; - } - - public UUID getUuid() { - return uuid; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof FileNameMetaInformation that)) return false; - return uuid.equals(that.uuid); - } - - @Override - public int hashCode() { - return Objects.hash(uuid); - } - - @Override - public String toString() { - return "FileNameMetaInformation{" + "uuid=" + uuid + '}'; - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/csv/timeseries/ColumnScheme.java b/src/main/java/edu/ie3/datamodel/io/csv/timeseries/ColumnScheme.java deleted file mode 100644 index caf683d31..000000000 --- a/src/main/java/edu/ie3/datamodel/io/csv/timeseries/ColumnScheme.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.csv.timeseries; - -import edu.ie3.datamodel.models.value.*; -import edu.ie3.util.StringUtils; -import java.util.Arrays; -import java.util.Objects; -import java.util.Optional; - -/** - * Yet supported column schemes in individual time series - * - * @deprecated since 3.0. 
Use {@link edu.ie3.datamodel.io.naming.timeseries.ColumnScheme} instead - */ -@Deprecated(since = "3.0", forRemoval = true) -public enum ColumnScheme { - ENERGY_PRICE("c", EnergyPriceValue.class), - ACTIVE_POWER("p", PValue.class), - APPARENT_POWER("pq", SValue.class), - HEAT_DEMAND("h", HeatDemandValue.class), - ACTIVE_POWER_AND_HEAT_DEMAND("ph", HeatAndPValue.class), - APPARENT_POWER_AND_HEAT_DEMAND("pqh", HeatAndSValue.class), - WEATHER("weather", WeatherValue.class); - - private final String scheme; - private final Class valueClass; - - @Deprecated - ColumnScheme(String scheme, Class valueClass) { - this.scheme = scheme; - this.valueClass = valueClass; - } - - public String getScheme() { - return scheme; - } - - public Class getValueClass() { - return valueClass; - } - - public static Optional parse(String key) { - String cleanString = StringUtils.cleanString(key).toLowerCase(); - return Arrays.stream(ColumnScheme.values()) - .filter(entry -> Objects.equals(entry.scheme, cleanString)) - .findFirst(); - } - - public static Optional parse(Class valueClass) { - /* IMPORTANT NOTE: Make sure to start with child classes and then use parent classes to allow for most precise - * parsing (child class instances are also assignable to parent classes) */ - - if (EnergyPriceValue.class.isAssignableFrom(valueClass)) return Optional.of(ENERGY_PRICE); - if (HeatAndSValue.class.isAssignableFrom(valueClass)) - return Optional.of(APPARENT_POWER_AND_HEAT_DEMAND); - if (SValue.class.isAssignableFrom(valueClass)) return Optional.of(APPARENT_POWER); - if (HeatAndPValue.class.isAssignableFrom(valueClass)) - return Optional.of(ACTIVE_POWER_AND_HEAT_DEMAND); - if (PValue.class.isAssignableFrom(valueClass)) return Optional.of(ACTIVE_POWER); - if (HeatDemandValue.class.isAssignableFrom(valueClass)) return Optional.of(HEAT_DEMAND); - if (WeatherValue.class.isAssignableFrom(valueClass)) return Optional.of(WEATHER); - return Optional.empty(); - } -} diff --git 
a/src/main/java/edu/ie3/datamodel/io/csv/timeseries/IndividualTimeSeriesMetaInformation.java b/src/main/java/edu/ie3/datamodel/io/csv/timeseries/IndividualTimeSeriesMetaInformation.java deleted file mode 100644 index fd9f0db40..000000000 --- a/src/main/java/edu/ie3/datamodel/io/csv/timeseries/IndividualTimeSeriesMetaInformation.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.csv.timeseries; - -import edu.ie3.datamodel.io.csv.FileNameMetaInformation; -import java.util.Objects; -import java.util.UUID; - -/** - * Specific meta information, that can be derived from a individual time series file - * - * @deprecated since 3.0. Use {@link - * edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation} instead - */ -@Deprecated(since = "3.0", forRemoval = true) -public class IndividualTimeSeriesMetaInformation extends FileNameMetaInformation { - private final ColumnScheme columnScheme; - - public IndividualTimeSeriesMetaInformation(UUID uuid, ColumnScheme columnScheme) { - super(uuid); - this.columnScheme = columnScheme; - } - - public IndividualTimeSeriesMetaInformation( - edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation - newMetaInformation) { - super(newMetaInformation.getUuid()); - this.columnScheme = - ColumnScheme.parse(newMetaInformation.getColumnScheme().toString()) - .orElseThrow( - () -> - new RuntimeException( - "Cannot convert new column scheme " - + newMetaInformation.getColumnScheme().getScheme() - + " to deprecated column scheme!")); - } - - public ColumnScheme getColumnScheme() { - return columnScheme; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof IndividualTimeSeriesMetaInformation that)) return false; - if (!super.equals(o)) return false; - return columnScheme == 
that.columnScheme; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), columnScheme); - } - - @Override - public String toString() { - return "IndividualTimeSeriesMetaInformation{" - + "uuid=" - + getUuid() - + ", columnScheme=" - + columnScheme - + '}'; - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/csv/timeseries/LoadProfileTimeSeriesMetaInformation.java b/src/main/java/edu/ie3/datamodel/io/csv/timeseries/LoadProfileTimeSeriesMetaInformation.java deleted file mode 100644 index 86a3a984b..000000000 --- a/src/main/java/edu/ie3/datamodel/io/csv/timeseries/LoadProfileTimeSeriesMetaInformation.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.csv.timeseries; - -import edu.ie3.datamodel.io.csv.FileNameMetaInformation; -import java.util.Objects; -import java.util.UUID; - -/** - * Specific meta information, that can be derived from a load profile time series file - * - * @deprecated since 3.0. 
Use {@link - * edu.ie3.datamodel.io.naming.timeseries.LoadProfileTimeSeriesMetaInformation} instead - */ -@Deprecated(since = "3.0", forRemoval = true) -public class LoadProfileTimeSeriesMetaInformation extends FileNameMetaInformation { - private final String profile; - - public LoadProfileTimeSeriesMetaInformation(UUID uuid, String profile) { - super(uuid); - this.profile = profile; - } - - public LoadProfileTimeSeriesMetaInformation( - edu.ie3.datamodel.io.naming.timeseries.LoadProfileTimeSeriesMetaInformation metaInformation) { - super(metaInformation.getUuid()); - this.profile = metaInformation.getProfile(); - } - - public String getProfile() { - return profile; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof LoadProfileTimeSeriesMetaInformation that)) return false; - if (!super.equals(o)) return false; - return profile.equals(that.profile); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), profile); - } - - @Override - public String toString() { - return "LoadProfileTimeSeriesMetaInformation{" - + "uuid='" - + getUuid() - + '\'' - + ", profile='" - + profile - + '\'' - + '}'; - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/factory/Factory.java b/src/main/java/edu/ie3/datamodel/io/factory/Factory.java index 8a64f0ea7..0428701ec 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/Factory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/Factory.java @@ -6,6 +6,8 @@ package edu.ie3.datamodel.io.factory; import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; import java.util.*; import java.util.function.IntFunction; import java.util.stream.Collectors; @@ -26,6 +28,7 @@ public abstract class Factory { private final List> supportedClasses; + @SafeVarargs protected Factory(Class... 
supportedClasses) { this.supportedClasses = Arrays.asList(supportedClasses); } @@ -39,27 +42,40 @@ public List> getSupportedClasses() { * data * * @param data EntityData (or subclass) containing the data - * @return An entity wrapped in Option if successful, an empty option otherwise + * @return An entity wrapped in a {@link Success} if successful, or an exception wrapped in a + * {@link Failure} */ - public Optional get(D data) { + public Try get(D data) { isSupportedClass(data.getTargetClass()); // magic: case-insensitive get/set calls on set strings final List> allFields = getFields(data); - validateParameters(data, allFields.toArray((IntFunction[]>) Set[]::new)); - try { + validateParameters(data, allFields.toArray((IntFunction[]>) Set[]::new)); + // build the model - return Optional.of(buildModel(data)); + return Success.of(buildModel(data)); } catch (FactoryException e) { - // only catch FactoryExceptions, as more serious exceptions should be handled elsewhere - log.error( - "An error occurred when creating instance of {}.class.", - data.getTargetClass().getSimpleName(), - e); + return Failure.of( + new FactoryException( + "An error occurred when creating instance of " + + data.getTargetClass().getSimpleName() + + ".class.", + e)); } - return Optional.empty(); + } + + /** + * Builds entity with data from given EntityData object after doing all kinds of checks on the + * data + * + * @param data EntityData (or subclass) containing the data wrapped in a {@link Try} + * @return An entity wrapped in a {@link Success} if successful, or an exception wrapped in a + * {@link Failure} + */ + public Try get(Try data) { + return data.transformF(FactoryException::new).flatMap(this::get); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/factory/timeseries/CosmoIdCoordinateFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/timeseries/CosmoIdCoordinateFactory.java index 44d38c444..18920df21 100644 --- 
a/src/main/java/edu/ie3/datamodel/io/factory/timeseries/CosmoIdCoordinateFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/timeseries/CosmoIdCoordinateFactory.java @@ -7,7 +7,9 @@ import edu.ie3.datamodel.io.factory.SimpleFactoryData; import edu.ie3.util.geo.GeoUtils; -import java.util.*; +import java.util.Collections; +import java.util.List; +import java.util.Set; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.jts.geom.Point; diff --git a/src/main/java/edu/ie3/datamodel/io/factory/timeseries/SqlIdCoordinateFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/timeseries/SqlIdCoordinateFactory.java new file mode 100644 index 000000000..fb1d053f2 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/factory/timeseries/SqlIdCoordinateFactory.java @@ -0,0 +1,66 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.factory.timeseries; + +import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.io.factory.SimpleFactoryData; +import edu.ie3.util.geo.GeoUtils; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import org.apache.commons.lang3.tuple.Pair; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.Point; +import org.locationtech.jts.io.ParseException; +import org.locationtech.jts.io.WKBReader; + +public class SqlIdCoordinateFactory extends IdCoordinateFactory { + private static final String COORDINATE_ID = "id"; + private static final String COORDINATE = "coordinate"; + private final WKBReader reader = new WKBReader(); + + @Override + protected Pair buildModel(SimpleFactoryData data) { + try { + int coordinateId = data.getInt(COORDINATE_ID); + byte[] byteArr = WKBReader.hexToBytes(data.getField(COORDINATE)); + + Coordinate coordinate = reader.read(byteArr).getCoordinate(); + + Point point = 
GeoUtils.buildPoint(coordinate); + return Pair.of(coordinateId, point); + + } catch (ParseException e) { + throw new FactoryException(e); + } + } + + @Override + protected List> getFields(SimpleFactoryData data) { + return Collections.singletonList(newSet(COORDINATE_ID, COORDINATE)); + } + + @Override + public String getIdField() { + return COORDINATE_ID; + } + + @Override + public String getLatField() { + throw new UnsupportedOperationException( + "This is not supported by " + SqlIdCoordinateFactory.class + "!"); + } + + @Override + public String getLonField() { + throw new UnsupportedOperationException( + "this is not supported by " + SqlIdCoordinateFactory.class + "!"); + } + + public String getCoordinateField() { + return COORDINATE; + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/naming/DatabaseNamingStrategy.java b/src/main/java/edu/ie3/datamodel/io/naming/DatabaseNamingStrategy.java index ba35c3b08..e59727668 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/DatabaseNamingStrategy.java +++ b/src/main/java/edu/ie3/datamodel/io/naming/DatabaseNamingStrategy.java @@ -6,11 +6,22 @@ package edu.ie3.datamodel.io.naming; import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; +import edu.ie3.datamodel.models.UniqueEntity; +import java.util.Optional; /** A naming strategy for database entities */ public class DatabaseNamingStrategy { private static final String TIME_SERIES_PREFIX = "time_series_"; + private final EntityPersistenceNamingStrategy entityPersistenceNamingStrategy; + + public DatabaseNamingStrategy(EntityPersistenceNamingStrategy entityPersistenceNamingStrategy) { + this.entityPersistenceNamingStrategy = entityPersistenceNamingStrategy; + } + + public DatabaseNamingStrategy() { + this(new EntityPersistenceNamingStrategy()); + } /** * Provides the String that all time series tables are prefixed with @@ -30,4 +41,8 @@ public String getTimeSeriesPrefix() { public String getTimeSeriesEntityName(ColumnScheme columnScheme) { return 
TIME_SERIES_PREFIX + columnScheme.getScheme(); } + + public Optional getEntityName(Class cls) { + return entityPersistenceNamingStrategy.getEntityName(cls); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/naming/DefaultDirectoryHierarchy.java b/src/main/java/edu/ie3/datamodel/io/naming/DefaultDirectoryHierarchy.java index cabbcb01b..522222b2c 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/DefaultDirectoryHierarchy.java +++ b/src/main/java/edu/ie3/datamodel/io/naming/DefaultDirectoryHierarchy.java @@ -31,7 +31,6 @@ import edu.ie3.datamodel.models.result.thermal.ThermalUnitResult; import edu.ie3.datamodel.models.timeseries.TimeSeries; import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput; -import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -47,9 +46,6 @@ public class DefaultDirectoryHierarchy implements FileHierarchy { private static final Logger logger = LoggerFactory.getLogger(DefaultDirectoryHierarchy.class); - /** Use the unix file separator here. */ - protected static final String FILE_SEPARATOR = File.separator; - /** Base directory for this specific grid model. The base path should be a directory. 
*/ private final Path baseDirectory; @@ -64,41 +60,27 @@ public class DefaultDirectoryHierarchy implements FileHierarchy { private final Path resultTree; - public DefaultDirectoryHierarchy(String baseDirectory, String gridName) { + public DefaultDirectoryHierarchy(Path baseDirectory, String gridName) { /* Prepare the base path */ - String baseDirectoryNormalized = - FilenameUtils.normalizeNoEndSeparator(baseDirectory, true) + FILE_SEPARATOR; - this.baseDirectory = Paths.get(baseDirectoryNormalized).toAbsolutePath(); - this.projectDirectory = - Paths.get( - baseDirectoryNormalized - + FilenameUtils.normalizeNoEndSeparator(gridName, true) - + FILE_SEPARATOR) - .toAbsolutePath(); + Path baseDirectoryNormalized = + Path.of(FilenameUtils.normalizeNoEndSeparator(String.valueOf(baseDirectory), true)); + this.baseDirectory = baseDirectoryNormalized.toAbsolutePath(); + this.projectDirectory = baseDirectoryNormalized.resolve(gridName).toAbsolutePath(); /* Prepare the sub directories by appending the relative path to base path and mapping to information about being mandatory */ this.subDirectories = Arrays.stream(SubDirectories.values()) .collect( Collectors.toMap( - subDirectory -> - Paths.get( - FilenameUtils.concat( - this.projectDirectory.toString(), subDirectory.getRelPath())), + subDirectory -> this.projectDirectory.resolve(subDirectory.getRelPath()), SubDirectories::isMandatory)); - inputTree = - Paths.get( - FilenameUtils.concat( - projectDirectory.toString(), SubDirectories.Constants.INPUT_SUB_TREE)); - resultTree = - Paths.get( - FilenameUtils.concat( - projectDirectory.toString(), SubDirectories.Constants.RESULT_SUB_TREE)); + inputTree = projectDirectory.resolve(SubDirectories.Constants.INPUT_SUB_TREE); + resultTree = projectDirectory.resolve(SubDirectories.Constants.RESULT_SUB_TREE); } /** - * Checks, if the structure beneath {@link #baseDirectory} is okay. + * Checks, if the structure beneath {@link #projectDirectory} is okay. 
* * @throws FileException if not */ @@ -140,7 +122,7 @@ private void checkExpectedDirectories() throws FileException { } /** - * Checks the elements, that are further available underneath the {@link this#baseDirectory}. If + * Checks the elements, that are further available underneath the {@link #projectDirectory}. If * there is a directory, that is neither mandatory, nor optional, raise an Exception. * * @throws FileException if there is an unexpected directory @@ -195,21 +177,22 @@ public void createDirs(boolean withOptionals) throws IOException { * Gives the {@link #baseDirectory}). * * @return An Option to the base directory as a string + * @deprecated Use {@link edu.ie3.datamodel.io.connectors.CsvFileConnector} instead */ + @Deprecated(since = "3.0", forRemoval = true) @Override - public Optional getBaseDirectory() { - return Optional.of(this.baseDirectory.toString()); + public Optional getBaseDirectory() { + return Optional.of(this.baseDirectory); } /** * Gives the correct sub directory (w.r.t. {@link #baseDirectory}) for the provided class. * * @param cls Class to define the sub directory for - * @param fileSeparator The file separator to use * @return An Option to the regarding sub directory as a string */ @Override - public Optional getSubDirectory(Class cls, String fileSeparator) { + public Optional getSubDirectory(Class cls) { /* Go through all sub directories and check, if the given class belongs to one of the classes mapped to the sub directories. 
*/ Optional maybeSubDirectory = Arrays.stream(SubDirectories.values()) @@ -224,11 +207,8 @@ public Optional getSubDirectory(Class cls, Strin return Optional.empty(); } else { /* Build the full path and then refer it to the base directory */ - Path fullPath = - Paths.get( - FilenameUtils.concat( - this.projectDirectory.toString(), maybeSubDirectory.get().getRelPath())); - String relPath = this.baseDirectory.relativize(fullPath).toString(); + Path fullPath = this.projectDirectory.resolve(maybeSubDirectory.get().getRelPath()); + Path relPath = this.baseDirectory.relativize(fullPath); return Optional.of(relPath); } @@ -236,7 +216,7 @@ public Optional getSubDirectory(Class cls, Strin private enum SubDirectories { GRID_INPUT( - Constants.INPUT_SUB_TREE + FILE_SEPARATOR + "grid" + FILE_SEPARATOR, + Constants.INPUT_SUB_TREE.resolve("grid"), true, Stream.of( LineInput.class, @@ -247,7 +227,7 @@ private enum SubDirectories { NodeInput.class) .collect(Collectors.toSet())), GRID_RESULT( - Constants.RESULT_SUB_TREE + FILE_SEPARATOR + "grid" + FILE_SEPARATOR, + Constants.RESULT_SUB_TREE.resolve("grid"), false, Stream.of( LineResult.class, @@ -257,7 +237,7 @@ private enum SubDirectories { NodeResult.class) .collect(Collectors.toSet())), GLOBAL( - Constants.INPUT_SUB_TREE + FILE_SEPARATOR + "global" + FILE_SEPARATOR, + Constants.INPUT_SUB_TREE.resolve("global"), true, Stream.of( LineTypeInput.class, @@ -275,7 +255,7 @@ private enum SubDirectories { LoadProfileInput.class) .collect(Collectors.toSet())), PARTICIPANTS_INPUT( - Constants.INPUT_SUB_TREE + FILE_SEPARATOR + "participants" + FILE_SEPARATOR, + Constants.INPUT_SUB_TREE.resolve("participants"), true, Stream.of( BmInput.class, @@ -290,7 +270,7 @@ private enum SubDirectories { WecInput.class) .collect(Collectors.toSet())), PARTICIPANTS_RESULTS( - Constants.RESULT_SUB_TREE + FILE_SEPARATOR + "participants" + FILE_SEPARATOR, + Constants.RESULT_SUB_TREE.resolve("participants"), false, Stream.of( BmResult.class, @@ -307,27 +287,27 
@@ private enum SubDirectories { FlexOptionsResult.class) .collect(Collectors.toSet())), TIME_SERIES( - PARTICIPANTS_INPUT.relPath + "time_series" + FILE_SEPARATOR, + PARTICIPANTS_INPUT.relPath.resolve("time_series"), false, Stream.of(TimeSeries.class, TimeSeriesMappingSource.MappingEntry.class) .collect(Collectors.toSet())), THERMAL_INPUT( - Constants.INPUT_SUB_TREE + FILE_SEPARATOR + "thermal" + FILE_SEPARATOR, + Constants.INPUT_SUB_TREE.resolve("thermal"), false, Stream.of(ThermalUnitInput.class, ThermalBusInput.class).collect(Collectors.toSet())), THERMAL_RESULTS( - Constants.RESULT_SUB_TREE + FILE_SEPARATOR + "thermal" + FILE_SEPARATOR, + Constants.RESULT_SUB_TREE.resolve("thermal"), false, Stream.of(ThermalUnitResult.class).collect(Collectors.toSet())), GRAPHICS( - Constants.INPUT_SUB_TREE + FILE_SEPARATOR + "graphics" + FILE_SEPARATOR, + Constants.INPUT_SUB_TREE.resolve("graphics"), false, Stream.of(GraphicInput.class).collect(Collectors.toSet())); - private final String relPath; + private final Path relPath; private final boolean mandatory; private final Set> relevantClasses; - public String getRelPath() { + public Path getRelPath() { return relPath; } @@ -339,15 +319,15 @@ public Set> getRelevantClasses() { return relevantClasses; } - SubDirectories(String relPath, boolean mandatory, Set> relevantClasses) { + SubDirectories(Path relPath, boolean mandatory, Set> relevantClasses) { this.relPath = relPath; this.mandatory = mandatory; this.relevantClasses = Collections.unmodifiableSet(relevantClasses); } private static class Constants { - private static final String INPUT_SUB_TREE = "input"; - private static final String RESULT_SUB_TREE = "results"; + private static final Path INPUT_SUB_TREE = Paths.get("input"); + private static final Path RESULT_SUB_TREE = Paths.get("results"); } } } diff --git a/src/main/java/edu/ie3/datamodel/io/naming/EntityPersistenceNamingStrategy.java b/src/main/java/edu/ie3/datamodel/io/naming/EntityPersistenceNamingStrategy.java 
index 566c1b86f..d46cdf8bf 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/EntityPersistenceNamingStrategy.java +++ b/src/main/java/edu/ie3/datamodel/io/naming/EntityPersistenceNamingStrategy.java @@ -124,34 +124,6 @@ public Pattern getIndividualTimeSeriesPattern() { return individualTimeSeriesPattern; } - /** - * Extracts meta information from a valid source name for an individual time series - * - * @param sourceName Name of the source to extract information from, e.g. file name or SQL table - * name - * @return Meta information form individual time series source name - * @deprecated since 3.0. Use {@link #individualTimesSeriesMetaInformation(String)} instead - */ - @Deprecated(since = "3.0", forRemoval = true) - public edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation - extractIndividualTimesSeriesMetaInformation(String sourceName) { - Matcher matcher = getIndividualTimeSeriesPattern().matcher(sourceName); - if (!matcher.matches()) - throw new IllegalArgumentException( - "Cannot extract meta information on individual time series from '" + sourceName + "'."); - - String columnSchemeKey = matcher.group("columnScheme"); - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme columnScheme = - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme.parse(columnSchemeKey) - .orElseThrow( - () -> - new IllegalArgumentException( - "Cannot parse '" + columnSchemeKey + "' to valid column scheme.")); - - return new edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation( - UUID.fromString(matcher.group("uuid")), columnScheme); - } - /** * Extracts meta information from a valid source name for an individual time series * @@ -178,25 +150,6 @@ public IndividualTimeSeriesMetaInformation individualTimesSeriesMetaInformation( UUID.fromString(matcher.group("uuid")), columnScheme); } - /** - * Extracts meta information from a valid file name for a load profile time series - * - * @param fileName File name to extract information from - * @return 
Meta information form load profile time series file name - * @deprecated since 3.0. Use {@link #loadProfileTimesSeriesMetaInformation(String)} instead - */ - @Deprecated(since = "3.0", forRemoval = true) - public edu.ie3.datamodel.io.csv.timeseries.LoadProfileTimeSeriesMetaInformation - extractLoadProfileTimesSeriesMetaInformation(String fileName) { - Matcher matcher = getLoadProfileTimeSeriesPattern().matcher(fileName); - if (!matcher.matches()) - throw new IllegalArgumentException( - "Cannot extract meta information on load profile time series from '" + fileName + "'."); - - return new edu.ie3.datamodel.io.csv.timeseries.LoadProfileTimeSeriesMetaInformation( - UUID.fromString(matcher.group("uuid")), matcher.group("profile")); - } - /** * Extracts meta information from a valid file name for a load profile time series * diff --git a/src/main/java/edu/ie3/datamodel/io/naming/FileHierarchy.java b/src/main/java/edu/ie3/datamodel/io/naming/FileHierarchy.java index 0f3e1b7a0..d98747fe0 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/FileHierarchy.java +++ b/src/main/java/edu/ie3/datamodel/io/naming/FileHierarchy.java @@ -6,7 +6,7 @@ package edu.ie3.datamodel.io.naming; import edu.ie3.datamodel.models.UniqueEntity; -import java.io.File; +import java.nio.file.Path; import java.util.Optional; /** @@ -16,29 +16,20 @@ public interface FileHierarchy { /** * Determines the correct subdirectory (w.r.t. an arbitrary base directory) for a certain given - * class using the provided file separator for delimiting between directories and files. + * class using the file separator provided by {@link Path} for delimiting between directories and + * files. * * @param cls Class to define the sub directory for - * @param fileSeparator The file separator to use * @return An Option to the regarding sub directory as a string */ - Optional getSubDirectory(Class cls, String fileSeparator); - - /** - * Determines the correct subdirectory (w.r.t. 
an arbitrary base directory) for a certain given - * class using the Unix file separator for delimiting between directories and files. - * - * @param cls Class to define the sub directory for - * @return An Option to the regarding sub directory as a string - */ - default Optional getSubDirectory(Class cls) { - return getSubDirectory(cls, File.separator); - } + Optional getSubDirectory(Class cls); /** * Determines the base directory. * * @return An option to the base directory + * @deprecated Use {@link edu.ie3.datamodel.io.connectors.CsvFileConnector} instead */ - Optional getBaseDirectory(); + @Deprecated(since = "3.0", forRemoval = true) + Optional getBaseDirectory(); } diff --git a/src/main/java/edu/ie3/datamodel/io/naming/FileNamingStrategy.java b/src/main/java/edu/ie3/datamodel/io/naming/FileNamingStrategy.java index 7db3a984b..100aace7a 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/FileNamingStrategy.java +++ b/src/main/java/edu/ie3/datamodel/io/naming/FileNamingStrategy.java @@ -6,15 +6,14 @@ package edu.ie3.datamodel.io.naming; import edu.ie3.datamodel.io.IoUtil; -import edu.ie3.datamodel.io.csv.FileNameMetaInformation; import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; -import edu.ie3.datamodel.io.naming.timeseries.LoadProfileTimeSeriesMetaInformation; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.timeseries.TimeSeries; import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput; import edu.ie3.datamodel.models.value.Value; +import edu.ie3.datamodel.utils.FileUtils; import java.nio.file.Path; import java.util.Optional; import java.util.regex.Pattern; @@ -72,11 +71,8 @@ public FileNamingStrategy() { * @param cls Targeted class of the given file * @return An optional sub path to the actual file */ - public Optional getFilePath(Class cls) { - 
// do not adapt orElseGet, see https://www.baeldung.com/java-optional-or-else-vs-or-else-get for - // details - return getFilePath( - getEntityName(cls).orElseGet(() -> ""), getDirectoryPath(cls).orElseGet(() -> "")); + public Optional getFilePath(Class cls) { + return FileUtils.of(getEntityName(cls), getDirectoryPath(cls)); } /** @@ -90,12 +86,9 @@ public Optional getFilePath(Class cls) { * @return An optional sub path to the actual file */ public , E extends TimeSeriesEntry, V extends Value> - Optional getFilePath(T timeSeries) { - // do not adapt orElseGet, see https://www.baeldung.com/java-optional-or-else-vs-or-else-get for - // details - return getFilePath( - entityPersistenceNamingStrategy.getEntityName(timeSeries).orElseGet(() -> ""), - getDirectoryPath(timeSeries).orElseGet(() -> "")); + Optional getFilePath(T timeSeries) { + return FileUtils.of( + entityPersistenceNamingStrategy.getEntityName(timeSeries), getDirectoryPath(timeSeries)); } /** @@ -105,12 +98,14 @@ Optional getFilePath(T timeSeries) { * @param fileName File name * @param subDirectories Sub directory path * @return Concatenation of sub directory structure and file name + * @deprecated replaced with {@link FileUtils#of(String, Optional)} */ - private Optional getFilePath(String fileName, String subDirectories) { + @Deprecated(since = "3.0", forRemoval = true) + private Optional getFilePath(String fileName, Optional subDirectories) { if (fileName.isEmpty()) return Optional.empty(); - if (!subDirectories.isEmpty()) - return Optional.of(FilenameUtils.concat(subDirectories, fileName)); - else return Optional.of(fileName); + return subDirectories + .map(path -> path.resolve(fileName)) + .or(() -> Optional.of(Path.of(fileName))); } /** @@ -120,19 +115,14 @@ private Optional getFilePath(String fileName, String subDirectories) { * @param cls Targeted class of the given file * @return An optional sub directory path */ - public Optional getDirectoryPath(Class cls) { - Optional maybeDirectoryName = 
fileHierarchy.getSubDirectory(cls); + public Optional getDirectoryPath(Class cls) { + Optional maybeDirectoryName = fileHierarchy.getSubDirectory(cls); if (maybeDirectoryName.isEmpty()) { logger.debug("Cannot determine directory name for class '{}'.", cls); return Optional.empty(); } else { /* Make sure, the directory path does not start or end with file separator and in between the separator is harmonized */ - return Optional.of( - IoUtil.harmonizeFileSeparator( - maybeDirectoryName - .get() - .replaceFirst("^" + IoUtil.FILE_SEPARATOR_REGEX, "") - .replaceAll(IoUtil.FILE_SEPARATOR_REGEX + "$", ""))); + return maybeDirectoryName.map(IoUtil::harmonizeFileSeparator); } } @@ -147,19 +137,14 @@ public Optional getDirectoryPath(Class cls) { * @return An optional sub directory path */ public , E extends TimeSeriesEntry, V extends Value> - Optional getDirectoryPath(T timeSeries) { - Optional maybeDirectoryName = fileHierarchy.getSubDirectory(timeSeries.getClass()); + Optional getDirectoryPath(T timeSeries) { + Optional maybeDirectoryName = fileHierarchy.getSubDirectory(timeSeries.getClass()); if (maybeDirectoryName.isEmpty()) { logger.debug("Cannot determine directory name for time series '{}'.", timeSeries); return Optional.empty(); } else { /* Make sure, the directory path does not start or end with file separator and in between the separator is harmonized */ - return Optional.of( - IoUtil.harmonizeFileSeparator( - maybeDirectoryName - .get() - .replaceFirst("^" + IoUtil.FILE_SEPARATOR_REGEX, "") - .replaceAll(IoUtil.FILE_SEPARATOR_REGEX + "$", ""))); + return maybeDirectoryName.map(IoUtil::harmonizeFileSeparator); } } @@ -170,8 +155,7 @@ Optional getDirectoryPath(T timeSeries) { * @return An individual time series pattern */ public Pattern getIndividualTimeSeriesPattern() { - String subDirectory = - fileHierarchy.getSubDirectory(IndividualTimeSeries.class).orElseGet(() -> ""); + Optional subDirectory = fileHierarchy.getSubDirectory(IndividualTimeSeries.class); if 
(subDirectory.isEmpty()) { return entityPersistenceNamingStrategy.getIndividualTimeSeriesPattern(); @@ -180,7 +164,7 @@ public Pattern getIndividualTimeSeriesPattern() { * finally escaping them */ String joined = FilenameUtils.concat( - subDirectory, + subDirectory.get().toString(), entityPersistenceNamingStrategy.getIndividualTimeSeriesPattern().pattern()); String harmonized = IoUtil.harmonizeFileSeparator(joined); String escaped = harmonized.replace("\\", "\\\\"); @@ -196,7 +180,7 @@ public Pattern getIndividualTimeSeriesPattern() { * @return A load profile time series pattern */ public Pattern getLoadProfileTimeSeriesPattern() { - String subDirectory = fileHierarchy.getSubDirectory(LoadProfileInput.class).orElseGet(() -> ""); + Optional subDirectory = fileHierarchy.getSubDirectory(LoadProfileInput.class); if (subDirectory.isEmpty()) { return entityPersistenceNamingStrategy.getLoadProfileTimeSeriesPattern(); @@ -205,7 +189,7 @@ public Pattern getLoadProfileTimeSeriesPattern() { * finally escaping them */ String joined = FilenameUtils.concat( - subDirectory, + subDirectory.get().toString(), entityPersistenceNamingStrategy.getLoadProfileTimeSeriesPattern().pattern()); String harmonized = IoUtil.harmonizeFileSeparator(joined); String escaped = harmonized.replace("\\", "\\\\"); @@ -214,22 +198,6 @@ public Pattern getLoadProfileTimeSeriesPattern() { } } - /** - * Extracts meta information from a file name, of a time series. - * - * @param path Path to the file - * @return The meeting meta information - * @deprecated since 3.0. Use {@link #timeSeriesMetaInformation(Path)} instead. 
- */ - @Deprecated(since = "3.0", forRemoval = true) - public FileNameMetaInformation extractTimeSeriesMetaInformation(Path path) { - /* Extract file name from possibly fully qualified path */ - Path fileName = path.getFileName(); - if (fileName == null) - throw new IllegalArgumentException("Unable to extract file name from path '" + path + "'."); - return extractTimeSeriesMetaInformation(fileName.toString()); - } - /** * Extracts meta information from a file name, of a time series. * @@ -244,27 +212,6 @@ public TimeSeriesMetaInformation timeSeriesMetaInformation(Path path) { return timeSeriesMetaInformation(fileName.toString()); } - /** - * Extracts meta information from a file name, of a time series. Here, a file name without - * leading path has to be provided - * - * @param fileName File name - * @return The meeting meta information - * @deprecated since 3.0. Use {@link #timeSeriesMetaInformation(String)} instead. - */ - @Deprecated(since = "3.0", forRemoval = true) - public FileNameMetaInformation extractTimeSeriesMetaInformation(String fileName) { - - TimeSeriesMetaInformation meta = timeSeriesMetaInformation(fileName); - if (meta instanceof IndividualTimeSeriesMetaInformation ind) { - return new edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation(ind); - } else if (meta instanceof LoadProfileTimeSeriesMetaInformation load) { - return new edu.ie3.datamodel.io.csv.timeseries.LoadProfileTimeSeriesMetaInformation(load); - } else - throw new IllegalArgumentException( - "Unknown format of '" + fileName + "'. Cannot extract meta information."); - } - /** * Extracts meta information from a file name, of a time series. 
Here, a file name without * leading path has to be provided @@ -294,6 +241,10 @@ public static String removeFileNameEnding(String fileName) { return fileName.replaceAll("(?:\\.[^.\\\\/\\s]{1,255}){1,2}$", ""); } + public static Path removeFileNameEnding(Path filename) { + return Path.of(removeFileNameEnding(filename.toString())); + } + /** * Get the entity name for coordinates * @@ -309,12 +260,13 @@ public String getIdCoordinateEntityName() { * extension. * * @return An optional sub path to the id coordinate file + * @deprecated unused, no substitute */ - public Optional getIdCoordinateFilePath() { + @Deprecated(since = "3.0", forRemoval = true) + public Optional getIdCoordinateFilePath() { // do not adapt orElseGet, see https://www.baeldung.com/java-optional-or-else-vs-or-else-get for // details - return getFilePath( - getIdCoordinateEntityName(), fileHierarchy.getBaseDirectory().orElseGet(() -> "")); + return Optional.of(FileUtils.of(getIdCoordinateEntityName(), fileHierarchy.getBaseDirectory())); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/naming/FlatDirectoryHierarchy.java b/src/main/java/edu/ie3/datamodel/io/naming/FlatDirectoryHierarchy.java index 874322234..9e7e352d3 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/FlatDirectoryHierarchy.java +++ b/src/main/java/edu/ie3/datamodel/io/naming/FlatDirectoryHierarchy.java @@ -6,6 +6,7 @@ package edu.ie3.datamodel.io.naming; import edu.ie3.datamodel.models.UniqueEntity; +import java.nio.file.Path; import java.util.Optional; /** Default directory hierarchy for input models */ @@ -15,11 +16,10 @@ public class FlatDirectoryHierarchy implements FileHierarchy { * Gives empty sub directory. 
* * @param cls Class to define the sub directory for - * @param fileSeparator The file separator to use * @return An Option to the regarding sub directory as a string */ @Override - public Optional getSubDirectory(Class cls, String fileSeparator) { + public Optional getSubDirectory(Class cls) { return Optional.empty(); } @@ -27,9 +27,11 @@ public Optional getSubDirectory(Class cls, Strin * Gives the baseDirectory, which is Empty. * * @return An Option to the base directory as a string + * @deprecated Use {@link edu.ie3.datamodel.io.connectors.CsvFileConnector} instead */ + @Deprecated(since = "3.0", forRemoval = true) @Override - public Optional getBaseDirectory() { + public Optional getBaseDirectory() { return Optional.empty(); } } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java index 3209799f9..a8c79e735 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java @@ -8,6 +8,9 @@ import edu.ie3.datamodel.exceptions.EntityProcessorException; import edu.ie3.datamodel.models.StandardUnits; import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; +import edu.ie3.util.exceptions.QuantityException; import java.lang.reflect.Method; import java.util.*; import javax.measure.Quantity; @@ -37,7 +40,7 @@ public abstract class EntityProcessor extends Processor< * * @param registeredClass the class the entity processor should be able to handle */ - protected EntityProcessor(Class registeredClass) { + protected EntityProcessor(Class registeredClass) throws EntityProcessorException { super(registeredClass); this.fieldNameToMethod = mapFieldNameToGetter(registeredClass, Collections.singleton(NODE_INTERNAL)); @@ -51,7 +54,7 @@ protected EntityProcessor(Class registeredClass) { * @return an optional Map with fieldName to fieldValue 
or an empty optional if an error occurred * during processing */ - public Optional> handleEntity(T entity) { + public LinkedHashMap handleEntity(T entity) throws EntityProcessorException { if (!registeredClass.equals(entity.getClass())) throw new EntityProcessorException( "Cannot process " @@ -62,33 +65,32 @@ public Optional> handleEntity(T entity) { + entity.getClass().getSimpleName() + ".class!"); - try { - return Optional.of(processObject(entity, fieldNameToMethod)); - } catch (EntityProcessorException e) { - logger.error("Cannot process the entity{}.", entity, e); - return Optional.empty(); - } + return processObject(entity, fieldNameToMethod); } @Override - protected Optional handleProcessorSpecificQuantity( + protected Try handleProcessorSpecificQuantity( Quantity quantity, String fieldName) { return switch (fieldName) { case "energy", "eConsAnnual", "eStorage": - yield quantityValToOptionalString( - quantity.asType(Energy.class).to(StandardUnits.ENERGY_IN)); + yield Success.of( + quantityValToOptionalString(quantity.asType(Energy.class).to(StandardUnits.ENERGY_IN))); case "q": - yield quantityValToOptionalString( - quantity.asType(Power.class).to(StandardUnits.REACTIVE_POWER_IN)); + yield Success.of( + quantityValToOptionalString( + quantity.asType(Power.class).to(StandardUnits.REACTIVE_POWER_IN))); case "p", "pMax", "pOwn", "pThermal": - yield quantityValToOptionalString( - quantity.asType(Power.class).to(StandardUnits.ACTIVE_POWER_IN)); + yield Success.of( + quantityValToOptionalString( + quantity.asType(Power.class).to(StandardUnits.ACTIVE_POWER_IN))); default: - log.error( - "Cannot process quantity with value '{}' for field with name {} in input entity processing!", - quantity, - fieldName); - yield Optional.empty(); + yield Failure.of( + new QuantityException( + "Cannot process quantity with value '" + + quantity + + "' for field with name " + + fieldName + + " in input entity processing!")); }; } diff --git 
a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java index 36f197db6..830fbef02 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java @@ -17,6 +17,9 @@ import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicInput; import edu.ie3.datamodel.models.profile.LoadProfile; import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; +import edu.ie3.util.exceptions.QuantityException; import java.beans.Introspector; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -72,7 +75,7 @@ public abstract class Processor { * * @param foreSeenClass Class and its children that are foreseen to be handled with this processor */ - protected Processor(Class foreSeenClass) { + protected Processor(Class foreSeenClass) throws EntityProcessorException { if (!getEligibleEntityClasses().contains(foreSeenClass)) throw new EntityProcessorException( "Cannot register class '" @@ -104,9 +107,10 @@ public int compare(String a, String b) { * Maps the foreseen table fields to the objects getters * * @param cls class to use for mapping - * @return an array of strings of all field values of the class + * @return a map of all field values of the class */ - protected SortedMap mapFieldNameToGetter(Class cls) { + protected SortedMap mapFieldNameToGetter(Class cls) + throws EntityProcessorException { return mapFieldNameToGetter(cls, Collections.emptyList()); } @@ -115,10 +119,10 @@ protected SortedMap mapFieldNameToGetter(Class cls) { * * @param cls class to use for mapping * @param ignoreFields A collection of all field names to ignore during process - * @return an array of strings of all field values of the class + * @return a map of all field values of the class */ protected SortedMap mapFieldNameToGetter( - Class cls, 
Collection ignoreFields) { + Class cls, Collection ignoreFields) throws EntityProcessorException { try { final LinkedHashMap resFieldNameToMethod = new LinkedHashMap<>(); Arrays.stream(Introspector.getBeanInfo(cls, Object.class).getPropertyDescriptors()) @@ -178,7 +182,7 @@ public static SortedMap putUuidFirst(Map unsorted) { * @return Mapping from field name to value as String representation */ protected LinkedHashMap processObject( - Object object, Map fieldNameToGetter) { + Object object, Map fieldNameToGetter) throws EntityProcessorException { try { LinkedHashMap resultMap = new LinkedHashMap<>(); for (Map.Entry entry : fieldNameToGetter.entrySet()) { @@ -207,7 +211,8 @@ protected LinkedHashMap processObject( * @param fieldName Name of the foreseen field * @return A String representation of the result */ - protected String processMethodResult(Object methodReturnObject, Method method, String fieldName) { + protected String processMethodResult(Object methodReturnObject, Method method, String fieldName) + throws EntityProcessorException { StringBuilder resultStringBuilder = new StringBuilder(); @@ -232,15 +237,19 @@ protected String processMethodResult(Object methodReturnObject, Method method, S .map( o -> { if (o instanceof Quantity) { - return handleQuantity((Quantity) o, fieldName); + return Try.of( + () -> handleQuantity((Quantity) o, fieldName), + EntityProcessorException.class); } else { - throw new EntityProcessorException( - "Handling of " - + o.getClass().getSimpleName() - + ".class instance wrapped into Optional is currently not supported by entity processors!"); + return Failure.of( + new EntityProcessorException( + "Handling of " + + o.getClass().getSimpleName() + + ".class instance wrapped into Optional is currently not supported by entity processors!")); } }) - .orElse("")); + .orElse(Success.of("")) // (in case of empty optional) + .getOrThrow()); case "ZonedDateTime" -> resultStringBuilder.append( processZonedDateTime((ZonedDateTime) 
methodReturnObject)); case "OperationTime" -> resultStringBuilder.append( @@ -306,7 +315,8 @@ protected String processMethodResult(Object methodReturnObject, Method method, S * @return the resulting string of a VoltageLevel attribute value for the provided field or an * empty string when an invalid field name is provided */ - protected String processVoltageLevel(VoltageLevel voltageLevel, String fieldName) { + protected String processVoltageLevel(VoltageLevel voltageLevel, String fieldName) + throws EntityProcessorException { StringBuilder resultStringBuilder = new StringBuilder(); if (fieldName.equalsIgnoreCase(VOLT_LVL)) resultStringBuilder.append(voltageLevel.getId()); @@ -324,21 +334,26 @@ protected String processVoltageLevel(VoltageLevel voltageLevel, String fieldName * @return an optional string with the normalized to {@link StandardUnits} value of the quantity * or empty if an error occurred during processing */ - protected String handleQuantity(Quantity quantity, String fieldName) { - Optional optQuant; + protected String handleQuantity(Quantity quantity, String fieldName) + throws EntityProcessorException { + Try optQuant; if (specificQuantityFieldNames.contains(fieldName)) { optQuant = handleProcessorSpecificQuantity(quantity, fieldName); } else { - optQuant = quantityValToOptionalString(quantity); + optQuant = Success.of(quantityValToOptionalString(quantity)); } - return optQuant.orElseThrow( - () -> - new EntityProcessorException( - "Unable to process quantity value for attribute '" - + fieldName - + "' in entity " - + getRegisteredClass().getSimpleName() - + ".class.")); + + return optQuant + .transformF( + e -> + new EntityProcessorException( + "Unable to process quantity value for attribute '" + + fieldName + + "' in entity " + + getRegisteredClass().getSimpleName() + + ".class.", + e)) + .getOrThrow(); } /** @@ -354,7 +369,7 @@ protected String handleQuantity(Quantity quantity, String fieldName) { * @return an optional string with the normalized to 
{@link StandardUnits} value of the quantity * or empty if an error occurred during processing */ - protected abstract Optional handleProcessorSpecificQuantity( + protected abstract Try handleProcessorSpecificQuantity( Quantity quantity, String fieldName); protected String processUUIDArray(UUID[] uuids) { @@ -406,8 +421,8 @@ protected String processZonedDateTime(ZonedDateTime zonedDateTime) { * @param quantity Quantity to convert * @return A string of the quantity's value */ - protected Optional quantityValToOptionalString(Quantity quantity) { - return Optional.of(Double.toString(quantity.getValue().doubleValue())); + protected String quantityValToOptionalString(Quantity quantity) { + return Double.toString(quantity.getValue().doubleValue()); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/processor/ProcessorProvider.java b/src/main/java/edu/ie3/datamodel/io/processor/ProcessorProvider.java index 8539ca52b..8f3bb42f4 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/ProcessorProvider.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/ProcessorProvider.java @@ -6,6 +6,7 @@ package edu.ie3.datamodel.io.processor; import edu.ie3.datamodel.exceptions.EntityProcessorException; +import edu.ie3.datamodel.exceptions.FailureException; import edu.ie3.datamodel.exceptions.ProcessorProviderException; import edu.ie3.datamodel.io.processor.input.InputEntityProcessor; import edu.ie3.datamodel.io.processor.result.ResultEntityProcessor; @@ -17,10 +18,10 @@ import edu.ie3.datamodel.models.timeseries.TimeSeries; import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; import edu.ie3.datamodel.models.value.Value; +import edu.ie3.datamodel.utils.Try; import java.util.*; +import java.util.function.Function; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Wrapper providing the class specific processor to convert an instance of a {@link UniqueEntity} @@ -34,8 +35,6 @@ */ public class ProcessorProvider { - private 
static final Logger log = LoggerFactory.getLogger(ProcessorProvider.class); - /** unmodifiable map of all processors that has been provided on construction */ private final Map, EntityProcessor> entityProcessors; @@ -47,7 +46,7 @@ public class ProcessorProvider { timeSeriesProcessors; /** Get an instance of this class with all existing entity processors */ - public ProcessorProvider() { + public ProcessorProvider() throws EntityProcessorException { this.entityProcessors = init(allEntityProcessors()); this.timeSeriesProcessors = allTimeSeriesProcessors(); } @@ -70,14 +69,14 @@ public ProcessorProvider( this.timeSeriesProcessors = timeSeriesProcessors; } - public Optional> handleEntity(T entity) { - try { - EntityProcessor processor = getEntityProcessor(entity.getClass()); - return castProcessor(processor).handleEntity(entity); - } catch (ProcessorProviderException e) { - log.error("Exception occurred during entity handling.", e); - return Optional.empty(); - } + public + Try, ProcessorProviderException> handleEntity(T entity) { + return Try.of(() -> getEntityProcessor(entity.getClass()), ProcessorProviderException.class) + .flatMap(ProcessorProvider::castProcessor) + .flatMap( + processor -> + Try.of(() -> processor.handleEntity(entity), EntityProcessorException.class) + .transformF(ProcessorProviderException::new)); } /** @@ -112,18 +111,15 @@ private EntityProcessor getEntityProcessor( * @return A set of mappings from field name to value */ public , E extends TimeSeriesEntry, V extends Value> - Optional>> handleTimeSeries(T timeSeries) { + Set> handleTimeSeries(T timeSeries) + throws ProcessorProviderException { TimeSeriesProcessorKey key = new TimeSeriesProcessorKey(timeSeries); - try { - TimeSeriesProcessor processor = getTimeSeriesProcessor(key); - return Optional.of(processor.handleTimeSeries(timeSeries)); - } catch (ProcessorProviderException e) { - log.error("Cannot handle the time series '{}'.", timeSeries, e); - return Optional.empty(); - } catch 
(EntityProcessorException e) { - log.error("Error during processing of time series.", e); - return Optional.empty(); - } + return Try.of(() -> this.getTimeSeriesProcessor(key), ProcessorProviderException.class) + .flatMap( + processor -> + Try.of(() -> processor.handleTimeSeries(timeSeries), EntityProcessorException.class) + .transformF(ProcessorProviderException::new)) + .getOrThrow(); } /** @@ -235,7 +231,8 @@ private Map, EntityProcessor> allEntityProcessors() { + public static Collection> allEntityProcessors() + throws EntityProcessorException { Collection> resultingProcessors = new ArrayList<>(); resultingProcessors.addAll(allInputEntityProcessors()); resultingProcessors.addAll(allResultEntityProcessors()); @@ -247,7 +244,8 @@ public static Collection> allEntityProce * * @return a collection of all input processors */ - public static Collection> allInputEntityProcessors() { + public static Collection> allInputEntityProcessors() + throws EntityProcessorException { Collection> resultingProcessors = new ArrayList<>(); for (Class cls : InputEntityProcessor.eligibleEntityClasses) { resultingProcessors.add(new InputEntityProcessor(cls)); @@ -260,7 +258,8 @@ public static Collection> allInputEntity * * @return a collection of all result processors */ - public static Collection> allResultEntityProcessors() { + public static Collection> allResultEntityProcessors() + throws EntityProcessorException { Collection> resultingProcessors = new ArrayList<>(); for (Class cls : ResultEntityProcessor.eligibleEntityClasses) { resultingProcessors.add(new ResultEntityProcessor(cls)); @@ -273,32 +272,43 @@ public static Collection> allResultEntit * * @return A mapping from eligible combinations to processors */ + @SuppressWarnings("unchecked") public static Map< TimeSeriesProcessorKey, TimeSeriesProcessor< TimeSeries, Value>, TimeSeriesEntry, Value>> - allTimeSeriesProcessors() { - return TimeSeriesProcessor.eligibleKeys.stream() - .collect( - Collectors.toMap( - key -> key, - key -> 
- new TimeSeriesProcessor<>( - (Class, Value>>) key.getTimeSeriesClass(), - (Class>) key.getEntryClass(), - (Class) key.getValueClass()))); + allTimeSeriesProcessors() throws EntityProcessorException { + try { + return Try.scanStream( + TimeSeriesProcessor.eligibleKeys.stream() + .map( + key -> + Try.of( + () -> + new TimeSeriesProcessor<>( + (Class, Value>>) + key.getTimeSeriesClass(), + (Class>) key.getEntryClass(), + (Class) key.getValueClass()), + EntityProcessorException.class)), + "list of processors") + .getOrThrow() + .collect(Collectors.toMap(TimeSeriesProcessor::getRegisteredKey, Function.identity())); + } catch (FailureException e) { + throw new EntityProcessorException(e.getCause()); + } } @SuppressWarnings("unchecked cast") - private static EntityProcessor castProcessor( - EntityProcessor processor) throws ProcessorProviderException { - try { - return (EntityProcessor) processor; - } catch (ClassCastException ex) { - throw new ProcessorProviderException( - "Cannot cast processor with registered class '" - + processor.getRegisteredClass().getSimpleName() - + "'. This indicates a fatal problem with the processor mapping!"); - } + private static + Try, ProcessorProviderException> castProcessor( + EntityProcessor processor) { + return Try.of(() -> (EntityProcessor) processor, ClassCastException.class) + .transformF( + e -> + new ProcessorProviderException( + "Cannot cast processor with registered class '" + + processor.getRegisteredClass().getSimpleName() + + "'. 
This indicates a fatal problem with the processor mapping!")); } } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java index 16a75b725..398f404aa 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.processor.input; +import edu.ie3.datamodel.exceptions.EntityProcessorException; import edu.ie3.datamodel.io.processor.EntityProcessor; import edu.ie3.datamodel.io.source.TimeSeriesMappingSource; import edu.ie3.datamodel.models.input.*; @@ -73,7 +74,8 @@ public class InputEntityProcessor extends EntityProcessor { StorageTypeInput.class, WecTypeInput.class); - public InputEntityProcessor(Class registeredClass) { + public InputEntityProcessor(Class registeredClass) + throws EntityProcessorException { super(registeredClass); } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/result/ResultEntityProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/result/ResultEntityProcessor.java index 93a792eda..177ed1909 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/result/ResultEntityProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/result/ResultEntityProcessor.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.processor.result; +import edu.ie3.datamodel.exceptions.EntityProcessorException; import edu.ie3.datamodel.io.factory.result.SystemParticipantResultFactory; import edu.ie3.datamodel.io.processor.EntityProcessor; import edu.ie3.datamodel.models.StandardUnits; @@ -17,6 +18,9 @@ import edu.ie3.datamodel.models.result.system.*; import edu.ie3.datamodel.models.result.thermal.CylindricalStorageResult; import edu.ie3.datamodel.models.result.thermal.ThermalHouseResult; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; +import 
edu.ie3.util.exceptions.QuantityException; import java.util.*; import javax.measure.Quantity; import javax.measure.quantity.Energy; @@ -56,29 +60,35 @@ public class ResultEntityProcessor extends EntityProcessor { EmResult.class, FlexOptionsResult.class); - public ResultEntityProcessor(Class registeredClass) { + public ResultEntityProcessor(Class registeredClass) + throws EntityProcessorException { super(registeredClass); } @Override - protected Optional handleProcessorSpecificQuantity( + protected Try handleProcessorSpecificQuantity( Quantity quantity, String fieldName) { return switch (fieldName) { case "energy", "eConsAnnual", "eStorage": - yield quantityValToOptionalString( - quantity.asType(Energy.class).to(StandardUnits.ENERGY_RESULT)); + yield Success.of( + quantityValToOptionalString( + quantity.asType(Energy.class).to(StandardUnits.ENERGY_RESULT))); case "q": - yield quantityValToOptionalString( - quantity.asType(Power.class).to(StandardUnits.REACTIVE_POWER_RESULT)); + yield Success.of( + quantityValToOptionalString( + quantity.asType(Power.class).to(StandardUnits.REACTIVE_POWER_RESULT))); case "p", "pMax", "pOwn", "pThermal", "pRef", "pMin": - yield quantityValToOptionalString( - quantity.asType(Power.class).to(StandardUnits.ACTIVE_POWER_RESULT)); + yield Success.of( + quantityValToOptionalString( + quantity.asType(Power.class).to(StandardUnits.ACTIVE_POWER_RESULT))); default: - log.error( - "Cannot process quantity with value '{}' for field with name {} in result entity processing!", - quantity, - fieldName); - yield Optional.empty(); + yield Failure.of( + new QuantityException( + "Cannot process quantity with value '" + + quantity + + "' for field with name " + + fieldName + + " in result entity processing!")); }; } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/timeseries/FieldSourceToMethod.java b/src/main/java/edu/ie3/datamodel/io/processor/timeseries/FieldSourceToMethod.java index 72b6b1fa8..21c05bb67 100644 --- 
a/src/main/java/edu/ie3/datamodel/io/processor/timeseries/FieldSourceToMethod.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/timeseries/FieldSourceToMethod.java @@ -12,19 +12,6 @@ * time series can be obtained from */ public record FieldSourceToMethod(FieldSource source, Method method) { - - /** @deprecated since 3.0. Use {@link #source()} instead */ - @Deprecated(since = "3.0") - public FieldSource getSource() { - return source; - } - - /** @deprecated since 3.0. Use {@link #method()} instead */ - @Deprecated(since = "3.0") - public Method getMethod() { - return method; - } - @Override public String toString() { return "FieldSourceToMethod{" + "source=" + source + ", method=" + method + '}'; diff --git a/src/main/java/edu/ie3/datamodel/io/processor/timeseries/TimeSeriesProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/timeseries/TimeSeriesProcessor.java index e4b9dbcfa..52f1a602a 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/timeseries/TimeSeriesProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/timeseries/TimeSeriesProcessor.java @@ -66,7 +66,8 @@ public class TimeSeriesProcessor< private final String[] flattenedHeaderElements; - public TimeSeriesProcessor(Class timeSeriesClass, Class entryClass, Class valueClass) { + public TimeSeriesProcessor(Class timeSeriesClass, Class entryClass, Class valueClass) + throws EntityProcessorException { super(timeSeriesClass); /* Check, if this processor can handle the foreseen combination of time series, entry and value */ @@ -91,6 +92,10 @@ public TimeSeriesProcessor(Class timeSeriesClass, Class entryClass, Class< this.flattenedHeaderElements = fieldToSource.keySet().toArray(new String[0]); } + public TimeSeriesProcessorKey getRegisteredKey() { + return registeredKey; + } + /** * Collects the mapping, where to find which information and how to get them (in terms of getter * method). 
@@ -101,7 +106,8 @@ public TimeSeriesProcessor(Class timeSeriesClass, Class entryClass, Class< * @return A mapping from field name to a tuple of source information and equivalent getter method */ private SortedMap buildFieldToSource( - Class timeSeriesClass, Class entryClass, Class valueClass) { + Class timeSeriesClass, Class entryClass, Class valueClass) + throws EntityProcessorException { /* Get the mapping from field name to getter method ignoring the getter for returning all entries */ Map timeSeriesMapping = mapFieldNameToGetter(timeSeriesClass, Arrays.asList("entries", "uuid", "type")) @@ -176,7 +182,7 @@ private SortedMap buildFieldToSource( } @Override - public Optional> handleEntity(TimeSeries entity) { + public LinkedHashMap handleEntity(TimeSeries entity) { throw new UnsupportedOperationException( "Don't invoke this simple method, but TimeSeriesProcessor#handleTimeSeries(TimeSeries)."); } @@ -187,7 +193,8 @@ public Optional> handleEntity(TimeSeries entity) { * @param timeSeries Time series to handle * @return A set of mappings from field name to value */ - public Set> handleTimeSeries(T timeSeries) { + public Set> handleTimeSeries(T timeSeries) + throws EntityProcessorException { TimeSeriesProcessorKey key = new TimeSeriesProcessorKey(timeSeries); if (!registeredKey.equals(key)) throw new EntityProcessorException( @@ -219,7 +226,7 @@ public Set> handleTimeSeries(T timeSeries) { * @param entry Actual entry to handle * @return A sorted map from field name to value as String representation */ - private Map handleEntry(T timeSeries, E entry) { + private Map handleEntry(T timeSeries, E entry) throws EntityProcessorException { /* Handle the information in the time series */ Map timeSeriesFieldToMethod = extractFieldToMethod(TIMESERIES); LinkedHashMap timeSeriesResults = diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index ec50a044f..da67f99a0 100644 --- 
a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -5,10 +5,7 @@ */ package edu.ie3.datamodel.io.sink; -import edu.ie3.datamodel.exceptions.ConnectorException; -import edu.ie3.datamodel.exceptions.ExtractorException; -import edu.ie3.datamodel.exceptions.ProcessorProviderException; -import edu.ie3.datamodel.exceptions.SinkException; +import edu.ie3.datamodel.exceptions.*; import edu.ie3.datamodel.io.connectors.CsvFileConnector; import edu.ie3.datamodel.io.csv.BufferedCsvWriter; import edu.ie3.datamodel.io.extractor.Extractor; @@ -33,6 +30,7 @@ import edu.ie3.datamodel.models.value.Value; import edu.ie3.util.StringUtils; import java.io.IOException; +import java.nio.file.Path; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -58,8 +56,8 @@ public class CsvFileSink implements InputDataSink, OutputDataSink { private final String csvSep; - public CsvFileSink(String baseFolderPath) { - this(baseFolderPath, new FileNamingStrategy(), false, ","); + public CsvFileSink(Path baseFolderPath) throws EntityProcessorException { + this(baseFolderPath, new FileNamingStrategy(), ","); } /** @@ -69,17 +67,11 @@ public CsvFileSink(String baseFolderPath) { * * @param baseFolderPath the base folder path where the files should be put into * @param fileNamingStrategy the data sink file naming strategy that should be used - * @param initFiles true if the files should be created during initialization (might create files, - * that only consist of a headline, because no data will be written into them), false - * otherwise * @param csvSep the csv file separator that should be use */ - public CsvFileSink( - String baseFolderPath, - FileNamingStrategy fileNamingStrategy, - boolean initFiles, - String csvSep) { - this(baseFolderPath, new ProcessorProvider(), fileNamingStrategy, initFiles, csvSep); + public CsvFileSink(Path baseFolderPath, FileNamingStrategy fileNamingStrategy, String 
csvSep) + throws EntityProcessorException { + this(baseFolderPath, new ProcessorProvider(), fileNamingStrategy, csvSep); } /** @@ -94,22 +86,16 @@ public CsvFileSink( * @param baseFolderPath the base folder path where the files should be put into * @param processorProvider the processor provided that should be used for entity serialization * @param fileNamingStrategy the data sink file naming strategy that should be used - * @param initFiles true if the files should be created during initialization (might create files, - * that only consist of a headline, because no data will be written into them), false - * otherwise * @param csvSep the csv file separator that should be use */ public CsvFileSink( - String baseFolderPath, + Path baseFolderPath, ProcessorProvider processorProvider, FileNamingStrategy fileNamingStrategy, - boolean initFiles, String csvSep) { this.csvSep = csvSep; this.processorProvider = processorProvider; this.connector = new CsvFileConnector(baseFolderPath, fileNamingStrategy); - - if (initFiles) initFiles(processorProvider, connector); } @Override @@ -276,23 +262,10 @@ public , V extends Value> void persistTimeSeries( } private , V extends Value> void persistTimeSeries( - TimeSeries timeSeries, BufferedCsvWriter writer) { - TimeSeriesProcessorKey key = new TimeSeriesProcessorKey(timeSeries); - + TimeSeries timeSeries, BufferedCsvWriter writer) throws ProcessorProviderException { try { Set> entityFieldData = - processorProvider - .handleTimeSeries(timeSeries) - .orElseThrow( - () -> - new SinkException( - "Cannot persist time series of combination '" - + key - + "'. 
This sink can only process the following combinations: [" - + processorProvider.getRegisteredTimeSeriesCombinations().stream() - .map(TimeSeriesProcessorKey::toString) - .collect(Collectors.joining(",")) - + "]")); + processorProvider.handleTimeSeries(timeSeries); entityFieldData.forEach( data -> { try { @@ -303,8 +276,8 @@ private , V extends Value> void persistTimeSeries( log.error("Exception occurred during processing the provided data fields: ", e); } }); - } catch (SinkException e) { - log.error("Exception occurred during processor request: ", e); + } catch (ProcessorProviderException e) { + throw new ProcessorProviderException("Exception occurred during processor request: ", e); } } @@ -316,23 +289,9 @@ private , V extends Value> void persistTimeSeries( * @param bounded to be all unique entities */ private void write(C entity) { - LinkedHashMap entityFieldData; try { - entityFieldData = - processorProvider - .handleEntity(entity) - .map(this::csvEntityFieldData) - .orElseThrow( - () -> - new SinkException( - "Cannot persist entity of type '" - + entity.getClass().getSimpleName() - + "'. This sink can only process the following entities: [" - + processorProvider.getRegisteredClasses().stream() - .map(Class::getSimpleName) - .collect(Collectors.joining(",")) - + "]")); - + LinkedHashMap entityFieldData = + processorProvider.handleEntity(entity).map(this::csvEntityFieldData).getOrThrow(); String[] headerElements = processorProvider.getHeaderElements(entity.getClass()); BufferedCsvWriter writer = connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); @@ -352,40 +311,6 @@ private void write(C entity) { } } - /** - * Initialize files, hence create a file for each expected class that will be processed in the - * future. Please note, that files for time series can only be create on presence of a concrete - * time series, as their file name depends on the individual uuid of the time series. 
- * - * @param processorProvider the processor provider all files that will be processed is derived - * from - * @param connector the connector to the files - */ - private void initFiles( - final ProcessorProvider processorProvider, final CsvFileConnector connector) { - processorProvider - .getRegisteredClasses() - .forEach( - clz -> { - try { - String[] headerElements = - csvHeaderElements(processorProvider.getHeaderElements(clz)); - - connector.getOrInitWriter(clz, headerElements, csvSep); - } catch (ProcessorProviderException e) { - log.error( - "Error during receiving of head line elements. Cannot prepare writer for class {}.", - clz, - e); - } catch (ConnectorException e) { - log.error( - "Error during instantiation files. Cannot get or init writer for class {}.", - clz, - e); - } - }); - } - /** * Transforms a provided array of strings to valid csv formatted strings (according to csv * specification RFC 4180) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java b/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java index f5a20b9b2..4809b80fe 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.sink; +import edu.ie3.datamodel.exceptions.ProcessorProviderException; import edu.ie3.datamodel.io.connectors.DataConnector; import edu.ie3.datamodel.io.processor.EntityProcessor; import edu.ie3.datamodel.models.UniqueEntity; @@ -37,7 +38,7 @@ public interface DataSink { * @param bounded to be all unique entities. Handling of specific entities is normally then * executed by a specific {@link EntityProcessor} */ - void persist(C entity); + void persist(C entity) throws ProcessorProviderException; /** * Should implement the entry point of a data sink to persist multiple entities in a collection. @@ -51,7 +52,8 @@ public interface DataSink { * @param bounded to be all unique entities. 
Handling of specific entities is normally then * executed by a specific {@link EntityProcessor} */ - void persistAll(Collection entities); + void persistAll(Collection entities) + throws ProcessorProviderException; /** * Should implement the handling of a whole time series. Therefore the single entries have to be @@ -62,5 +64,5 @@ public interface DataSink { * @param Type of actual value, that is inside the entry */ , V extends Value> void persistTimeSeries( - TimeSeries timeSeries); + TimeSeries timeSeries) throws ProcessorProviderException; } diff --git a/src/main/java/edu/ie3/datamodel/io/sink/InfluxDbSink.java b/src/main/java/edu/ie3/datamodel/io/sink/InfluxDbSink.java index 20eac6dbb..7c2f7b9f8 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/InfluxDbSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/InfluxDbSink.java @@ -5,11 +5,11 @@ */ package edu.ie3.datamodel.io.sink; -import edu.ie3.datamodel.exceptions.SinkException; +import edu.ie3.datamodel.exceptions.EntityProcessorException; +import edu.ie3.datamodel.exceptions.ProcessorProviderException; import edu.ie3.datamodel.io.connectors.InfluxDbConnector; import edu.ie3.datamodel.io.naming.EntityPersistenceNamingStrategy; import edu.ie3.datamodel.io.processor.ProcessorProvider; -import edu.ie3.datamodel.io.processor.timeseries.TimeSeriesProcessorKey; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.result.ResultEntity; import edu.ie3.datamodel.models.timeseries.TimeSeries; @@ -18,7 +18,6 @@ import java.time.ZonedDateTime; import java.util.*; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; import org.slf4j.Logger; @@ -43,8 +42,8 @@ public class InfluxDbSink implements OutputDataSink { * @param entityPersistenceNamingStrategy needed to create measurement names for entities */ public InfluxDbSink( - InfluxDbConnector connector, - EntityPersistenceNamingStrategy 
entityPersistenceNamingStrategy) { + InfluxDbConnector connector, EntityPersistenceNamingStrategy entityPersistenceNamingStrategy) + throws EntityProcessorException { this.connector = connector; this.entityPersistenceNamingStrategy = entityPersistenceNamingStrategy; this.processorProvider = @@ -58,7 +57,7 @@ public InfluxDbSink( * * @param connector needed for database connection */ - public InfluxDbSink(InfluxDbConnector connector) { + public InfluxDbSink(InfluxDbConnector connector) throws EntityProcessorException { this(connector, new EntityPersistenceNamingStrategy()); } @@ -68,7 +67,7 @@ public void shutdown() { } @Override - public void persist(C entity) { + public void persist(C entity) throws ProcessorProviderException { Set points = extractPoints(entity); // writes only the exact one point instead of unnecessarily wrapping it in BatchPoints if (points.size() == 1) write(points.iterator().next()); @@ -76,7 +75,8 @@ public void persist(C entity) { } @Override - public void persistAll(Collection entities) { + public void persistAll(Collection entities) + throws ProcessorProviderException { Set points = new HashSet<>(); for (C entity : entities) { points.addAll(extractPoints(entity)); @@ -86,7 +86,7 @@ public void persistAll(Collection entities) { @Override public , V extends Value> void persistTimeSeries( - TimeSeries timeSeries) { + TimeSeries timeSeries) throws ProcessorProviderException { Set points = transformToPoints(timeSeries); writeAll(points); } @@ -107,7 +107,7 @@ public void flush() { * * @param entity the entity to transform */ - private Optional transformToPoint(ResultEntity entity) { + private Point transformToPoint(ResultEntity entity) throws ProcessorProviderException { Optional measurementName = entityPersistenceNamingStrategy.getResultEntityName(entity.getClass()); if (measurementName.isEmpty()) @@ -124,37 +124,18 @@ private Optional transformToPoint(ResultEntity entity) { * @param entity the entity to transform * @param measurementName 
equivalent to the name of a relational table */ - private Optional transformToPoint(ResultEntity entity, String measurementName) { - LinkedHashMap entityFieldData; - try { - entityFieldData = - processorProvider - .handleEntity(entity) - .orElseThrow( - () -> - new SinkException( - "Cannot persist entity of type '" - + entity.getClass().getSimpleName() - + "'. This sink can only process the following entities: [" - + processorProvider.getRegisteredClasses().stream() - .map(Class::getSimpleName) - .collect(Collectors.joining(",")) - + "]")); - entityFieldData.remove(FIELD_NAME_TIME); - return Optional.of( - Point.measurement(transformToMeasurementName(measurementName)) - .time(entity.getTime().toInstant().toEpochMilli(), TimeUnit.MILLISECONDS) - .tag("input_model", entityFieldData.remove(FIELD_NAME_INPUT)) - .tag("scenario", connector.getScenarioName()) - .fields(Collections.unmodifiableMap(entityFieldData)) - .build()); - } catch (SinkException e) { - log.error( - "Cannot persist provided entity '{}'. 
Exception: {}", - entity.getClass().getSimpleName(), - e); - } - return Optional.empty(); + private Point transformToPoint(ResultEntity entity, String measurementName) + throws ProcessorProviderException { + + LinkedHashMap entityFieldData = + processorProvider.handleEntity(entity).getOrThrow(); + entityFieldData.remove(FIELD_NAME_TIME); + return Point.measurement(transformToMeasurementName(measurementName)) + .time(entity.getTime().toInstant().toEpochMilli(), TimeUnit.MILLISECONDS) + .tag("input_model", entityFieldData.remove(FIELD_NAME_INPUT)) + .tag("scenario", connector.getScenarioName()) + .fields(Collections.unmodifiableMap(entityFieldData)) + .build(); } /** @@ -166,8 +147,9 @@ private Optional transformToPoint(ResultEntity entity, String measurement * @param timeSeries the time series to transform */ private , V extends Value> Set transformToPoints( - TimeSeries timeSeries) { + TimeSeries timeSeries) throws ProcessorProviderException { if (timeSeries.getEntries().isEmpty()) return Collections.emptySet(); + Optional measurementName = entityPersistenceNamingStrategy.getEntityName(timeSeries); if (measurementName.isEmpty()) { String valueClassName = @@ -188,37 +170,21 @@ private , V extends Value> Set transformToPo * @param measurementName equivalent to the name of a relational table */ private , V extends Value> Set transformToPoints( - TimeSeries timeSeries, String measurementName) { - TimeSeriesProcessorKey key = new TimeSeriesProcessorKey(timeSeries); + TimeSeries timeSeries, String measurementName) throws ProcessorProviderException { Set points = new HashSet<>(); - try { - Set> entityFieldData = - processorProvider - .handleTimeSeries(timeSeries) - .orElseThrow( - () -> - new SinkException( - "Cannot persist time series of combination '" - + key - + "'. 
This sink can only process the following combinations: [" - + processorProvider.getRegisteredTimeSeriesCombinations().stream() - .map(TimeSeriesProcessorKey::toString) - .collect(Collectors.joining(",")) - + "]")); + Set> entityFieldData = + processorProvider.handleTimeSeries(timeSeries); - for (LinkedHashMap dataMapping : entityFieldData) { - String timeString = dataMapping.remove(FIELD_NAME_TIME); - long timeMillis = ZonedDateTime.parse(timeString).toInstant().toEpochMilli(); - Point point = - Point.measurement(transformToMeasurementName(measurementName)) - .time(timeMillis, TimeUnit.MILLISECONDS) - .tag("scenario", connector.getScenarioName()) - .fields(Collections.unmodifiableMap(dataMapping)) - .build(); - points.add(point); - } - } catch (SinkException e) { - log.error("Cannot persist provided time series '{}'. Exception: {}", key, e); + for (LinkedHashMap dataMapping : entityFieldData) { + String timeString = dataMapping.remove(FIELD_NAME_TIME); + long timeMillis = ZonedDateTime.parse(timeString).toInstant().toEpochMilli(); + Point point = + Point.measurement(transformToMeasurementName(measurementName)) + .time(timeMillis, TimeUnit.MILLISECONDS) + .tag("scenario", connector.getScenarioName()) + .fields(Collections.unmodifiableMap(dataMapping)) + .build(); + points.add(point); } return points; } @@ -233,20 +199,12 @@ private , V extends Value> Set transformToPo * @param bounded to be all unique entities, but logs an error and returns an empty Set if it * does not extend {@link ResultEntity} or {@link TimeSeries} */ - private Set extractPoints(C entity) { + private Set extractPoints(C entity) + throws ProcessorProviderException { Set points = new HashSet<>(); /* Distinguish between result models and time series */ if (entity instanceof ResultEntity resultEntity) { - try { - points.add( - transformToPoint(resultEntity) - .orElseThrow(() -> new SinkException("Could not transform entity"))); - } catch (SinkException e) { - log.error( - "Cannot persist provided 
entity '{}'. Exception: {}", - entity.getClass().getSimpleName(), - e); - } + points.add(transformToPoint(resultEntity)); } else if (entity instanceof TimeSeries timeSeries) { points.addAll(transformToPoints(timeSeries)); } else { diff --git a/src/main/java/edu/ie3/datamodel/io/source/DataSource.java b/src/main/java/edu/ie3/datamodel/io/source/DataSource.java index 486dbaa9e..0687e9e50 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/DataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/DataSource.java @@ -1,14 +1,17 @@ /* - * © 2021. TU Dortmund University, + * © 2023. TU Dortmund University, * Institute of Energy Systems, Energy Efficiency and Energy Economics, * Research group Distribution grid planning and operation */ package edu.ie3.datamodel.io.source; -/** - * General interface that is implemented by all specific data sources for different types of data - * structures that are persisted in different locations. Note: This interface is still under - * development and should be considered more as an internal API. It might change or even will be - * removed in the future! - */ -public interface DataSource {} +import edu.ie3.datamodel.models.UniqueEntity; +import java.util.*; +import java.util.stream.Stream; + +/** Interface that include functionalities for data sources in the database table, csv file etc. */ +public interface DataSource { + + /** Creates a stream of maps that represent the rows in the database */ + Stream> getSourceData(Class entityClass); +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/EntitySource.java b/src/main/java/edu/ie3/datamodel/io/source/EntitySource.java new file mode 100644 index 000000000..eec826f97 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/EntitySource.java @@ -0,0 +1,356 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source; + +import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.factory.EntityFactory; +import edu.ie3.datamodel.io.factory.SimpleEntityData; +import edu.ie3.datamodel.io.factory.input.AssetInputEntityData; +import edu.ie3.datamodel.io.factory.input.ConnectorInputEntityData; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; +import edu.ie3.datamodel.io.factory.input.TypedConnectorInputEntityData; +import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.*; +import edu.ie3.datamodel.models.result.ResultEntity; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Class that provides all functionalities to build entities */ +public abstract class EntitySource { + + protected static final Logger log = LoggerFactory.getLogger(EntitySource.class); + + // field names + protected static final String OPERATOR = "operator"; + protected static final String NODE = "node"; + protected static final String TYPE = "type"; + protected static final String FIELDS_TO_VALUES_MAP = "fieldsToValuesMap"; + + DataSource dataSource; + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= + + protected String buildSkippingMessage( + String entityDesc, String entityUuid, String entityId, String missingElementsString) { + return "Skipping " + + entityDesc + + " with uuid " + + entityUuid + + " and id " + + entityId + + ". 
Not all required entities found or map is missing entity key!\nMissing elements:\n" + + missingElementsString; + } + + protected String safeMapGet(Map map, String key, String mapName) { + return Optional.ofNullable(map.get(key)) + .orElse( + "Key '" + + key + + "' not found" + + (mapName.isEmpty() ? "!" : " in map '" + mapName + "'!")); + } + + /** + * Returns an {@link Optional} of the first {@link UniqueEntity} element of this collection + * matching the provided UUID or an empty {@code Optional} if no matching entity can be found. + * + * @param entityUuid uuid of the entity that should be looked for + * @param entities collection of entities that should be + * @param type of the entity that will be returned, derived from the provided collection + * @return either an optional containing the first entity that has the provided uuid or an empty + * optional if no matching entity with the provided uuid can be found + */ + protected Optional findFirstEntityByUuid( + String entityUuid, Collection entities) { + return entities.stream() + .parallel() + .filter(uniqueEntity -> uniqueEntity.getUuid().toString().equalsIgnoreCase(entityUuid)) + .findFirst(); + } + + /** + * Checks if the requested type of an asset can be found in the provided collection of types based + * on the provided fields to values mapping. The provided fields to values mapping needs to have + * one and only one field with key {@link #TYPE} and a corresponding UUID value. If the type can + * be found in the provided collection based on the UUID it is returned wrapped in a {@link + * Success}. Otherwise a {@link Failure} is returned and a warning is logged. + * + * @param types a collection of types that should be used for searching + * @param fieldsToAttributes the field name to value mapping incl. 
the key {@link #TYPE} + * @param skippedClassString debug string of the class that will be skipping + * @param the type of the resulting type instance + * @return a {@link Success} containing the type or a {@link Failure} if the type cannot be found + */ + protected Try getAssetType( + Collection types, Map fieldsToAttributes, String skippedClassString) { + + Optional assetType = + Optional.ofNullable(fieldsToAttributes.get(TYPE)) + .flatMap(typeUuid -> findFirstEntityByUuid(typeUuid, types)); + + // if the type is not present we return an empty element and + // log a warning + if (assetType.isEmpty()) { + String skippingMessage = + buildSkippingMessage( + skippedClassString, + safeMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), + safeMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), + TYPE + ": " + safeMapGet(fieldsToAttributes, TYPE, FIELDS_TO_VALUES_MAP)); + return new Failure<>(new SourceException("Failure due to: " + skippingMessage)); + } + return new Success<>(assetType.get()); + } + + /** + * Finds the required asset type and if present, adds it to the untyped entity data + * + * @param untypedEntityData Untyped entity data to enrich + * @param availableTypes Yet available asset types + * @param Type of the asset type + * @return {@link Try} to enhanced data + */ + protected + Try, SourceException> findAndAddType( + ConnectorInputEntityData untypedEntityData, Collection availableTypes) { + Try assetTypeOption = + getAssetType( + availableTypes, + untypedEntityData.getFieldsToValues(), + untypedEntityData.getClass().getSimpleName()); + return assetTypeOption.map(assetType -> addTypeToEntityData(untypedEntityData, assetType)); + } + + /** + * Enriches the given, untyped entity data with the provided asset type + * + * @param untypedEntityData Untyped entity data to enrich + * @param assetType Asset type to add + * @param Type of the asset type + * @return The enriched entity data + */ + protected TypedConnectorInputEntityData 
addTypeToEntityData( + ConnectorInputEntityData untypedEntityData, T assetType) { + Map fieldsToAttributes = untypedEntityData.getFieldsToValues(); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(TYPE); + + // build result object + return new TypedConnectorInputEntityData<>( + fieldsToAttributes, + untypedEntityData.getTargetClass(), + untypedEntityData.getOperatorInput(), + untypedEntityData.getNodeA(), + untypedEntityData.getNodeB(), + assetType); + } + + /** + * Returns either the first instance of a {@link OperatorInput} in the provided collection of or + * {@link OperatorInput#NO_OPERATOR_ASSIGNED} + * + * @param operators the collections of {@link OperatorInput}s that should be searched in + * @param operatorUuid the operator uuid that is requested + * @return either the first found instancen of {@link OperatorInput} or {@link + * OperatorInput#NO_OPERATOR_ASSIGNED} + */ + protected OperatorInput getFirstOrDefaultOperator( + Collection operators, + String operatorUuid, + String entityClassName, + String requestEntityUuid) { + if (operatorUuid == null) { + log.warn( + "Input source for class '{}' is missing the 'operator' field. " + + "This is okay, but you should consider fixing the file by adding the field. " + + "Defaulting to 'NO OPERATOR ASSIGNED'", + entityClassName); + return OperatorInput.NO_OPERATOR_ASSIGNED; + } else { + return operatorUuid.trim().isEmpty() + ? OperatorInput.NO_OPERATOR_ASSIGNED + : findFirstEntityByUuid(operatorUuid, operators) + .orElseGet( + () -> { + log.debug( + "Cannot find operator with uuid '{}' for element '{}' and uuid '{}'. 
Defaulting to 'NO OPERATOR ASSIGNED'.", + operatorUuid, + entityClassName, + requestEntityUuid); + return OperatorInput.NO_OPERATOR_ASSIGNED; + }); + } + } + + /** + * Returns a stream of tries of {@link NodeAssetInputEntityData} that can be used to build + * instances of several subtypes of {@link UniqueEntity} by a corresponding {@link EntityFactory} + * that consumes this data. param assetInputEntityDataStream + * + * @param assetInputEntityDataStream a stream consisting of {@link AssetInputEntityData} that is + * enriched with {@link NodeInput} data + * @param nodes a collection of {@link NodeInput} entities that should be used to build the data + * @return stream of the entity data wrapped in a {@link Try} + */ + protected Stream> nodeAssetInputEntityDataStream( + Stream assetInputEntityDataStream, Collection nodes) { + return assetInputEntityDataStream + .parallel() + .map( + assetInputEntityData -> { + // get the raw data + Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); + // get the node of the entity + String nodeUuid = fieldsToAttributes.get(NODE); + Optional node = findFirstEntityByUuid(nodeUuid, nodes); + + // if the node is not present we return an empty element and + // log a warning + if (node.isEmpty()) { + String skippingMessage = + buildSkippingMessage( + assetInputEntityData.getTargetClass().getSimpleName(), + fieldsToAttributes.get("uuid"), + fieldsToAttributes.get("id"), + NODE + ": " + nodeUuid); + return new Failure<>(new SourceException("Failure due to: " + skippingMessage)); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(NODE); + + return new Success<>( + new NodeAssetInputEntityData( + fieldsToAttributes, + assetInputEntityData.getTargetClass(), + assetInputEntityData.getOperatorInput(), + node.get())); + }); + } + + /** + * Returns a stream of optional {@link AssetInputEntityData} that can be used to build instances + * of several subtypes of {@link UniqueEntity} 
by a corresponding {@link EntityFactory} that + * consumes this data. + * + * @param entityClass the entity class that should be build + * @param operators a collection of {@link OperatorInput} entities that should be used to build + * the data + * @param type of the entity that should be build + * @return stream of the entity data wrapped in a {@link Try} + */ + protected Stream assetInputEntityDataStream( + Class entityClass, Collection operators) { + return dataSource + .getSourceData(entityClass) + .map( + fieldsToAttributes -> + assetInputEntityDataStream(entityClass, fieldsToAttributes, operators)); + } + + protected AssetInputEntityData assetInputEntityDataStream( + Class entityClass, + Map fieldsToAttributes, + Collection operators) { + + // get the operator of the entity + String operatorUuid = fieldsToAttributes.get(OPERATOR); + OperatorInput operator = + getFirstOrDefaultOperator( + operators, + operatorUuid, + entityClass.getSimpleName(), + safeMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP)); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().removeAll(new HashSet<>(Collections.singletonList(OPERATOR))); + + return new AssetInputEntityData(fieldsToAttributes, entityClass, operator); + } + + /** + * Returns a stream of {@link SimpleEntityData} for result entity classes, using a + * fields-to-attributes map. 
+ * + * @param entityClass the entity class that should be build + * @param Type of the {@link ResultEntity} to expect + * @return stream of {@link SimpleEntityData} + */ + protected Stream simpleEntityDataStream( + Class entityClass) { + return dataSource + .getSourceData(entityClass) + .map(fieldsToAttributes -> new SimpleEntityData(fieldsToAttributes, entityClass)); + } + + protected Stream> assetInputEntityStream( + Class entityClass, + EntityFactory factory, + Collection operators) { + return assetInputEntityDataStream(entityClass, operators).map(factory::get); + } + + /** + * Returns a stream of {@link Try} entities that can be build by using {@link + * NodeAssetInputEntityData} and their corresponding factory. + * + * @param entityClass the entity class that should be build + * @param factory the factory that should be used for the building process + * @param nodes a collection of {@link NodeInput} entities that should be used to build the + * entities + * @param operators a collection of {@link OperatorInput} entities should be used to build the + * entities + * @param Type of the {@link AssetInput} to expect + * @return stream of tries of the entities that has been built by the factory + */ + protected Stream> nodeAssetEntityStream( + Class entityClass, + EntityFactory factory, + Collection nodes, + Collection operators) { + return nodeAssetInputEntityDataStream(assetInputEntityDataStream(entityClass, operators), nodes) + .map(factory::get); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + public Set> buildNodeAssetEntities( + Class entityClass, + EntityFactory factory, + Collection nodes, + Collection operators) { + return nodeAssetEntityStream(entityClass, factory, nodes, operators) + .collect(Collectors.toSet()); + } + + public Set> buildAssetInputEntities( + Class entityClass, + EntityFactory factory, + Collection operators) { + return assetInputEntityStream(entityClass, factory, 
operators).collect(Collectors.toSet()); + } + + @SuppressWarnings("unchecked") + public Set> buildEntities( + Class entityClass, EntityFactory factory) { + return dataSource + .getSourceData(entityClass) + .map( + fieldsToAttributes -> { + SimpleEntityData data = new SimpleEntityData(fieldsToAttributes, entityClass); + return (Try) factory.get(data); + }) + .collect(Collectors.toSet()); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/GraphicSource.java b/src/main/java/edu/ie3/datamodel/io/source/GraphicSource.java index b09ce937a..7cdc097ca 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/GraphicSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/GraphicSource.java @@ -5,100 +5,216 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.exceptions.GraphicSourceException; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputEntityData; +import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputFactory; +import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputEntityData; +import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputFactory; import edu.ie3.datamodel.models.input.NodeInput; +import edu.ie3.datamodel.models.input.OperatorInput; import edu.ie3.datamodel.models.input.connector.LineInput; +import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; import edu.ie3.datamodel.models.input.container.GraphicElements; +import edu.ie3.datamodel.models.input.graphics.GraphicInput; import edu.ie3.datamodel.models.input.graphics.LineGraphicInput; import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput; -import java.util.Optional; -import java.util.Set; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** - * Interface that provides the capability to build entities of type {@link - * 
edu.ie3.datamodel.models.input.graphics.GraphicInput} from different data sources e.g. .csv files - * or databases + * Implementation that provides the capability to build entities of type {@link GraphicInput} from + * different data sources e.g. .csv files or databases * * @version 0.1 * @since 08.04.20 */ -public interface GraphicSource extends DataSource { +public class GraphicSource extends EntitySource { + // general fields + private final TypeSource typeSource; + private final RawGridSource rawGridSource; - /** - * Should return either a consistent instance of {@link GraphicElements} wrapped in {@link - * Optional} or an empty {@link Optional}. The decision to use {@link Optional} instead of - * returning the {@link GraphicElements} instance directly is motivated by the fact, that a {@link - * GraphicElements} is a container instance that depends on several other entities. Without being - * complete, it is useless for further processing. Hence, whenever at least one entity {@link - * GraphicElements} depends on cannot be provided, {@link Optional#empty()} should be returned and - * extensive logging should provide enough information to debug the error and fix the persistent - * data that has been failed to processed. - * - *

Furthermore, it is expected, that the specific implementation of this method ensures not - * only the completeness of the resulting {@link GraphicElements} instance, but also its validity - * e.g. in the sense that not duplicate UUIDs exist within all entities contained in the returning - * instance. - * - * @return either a valid, complete {@link GraphicElements} optional or {@link Optional#empty()} - */ - Optional getGraphicElements(); + // factories + private final LineGraphicInputFactory lineGraphicInputFactory; + private final NodeGraphicInputFactory nodeGraphicInputFactory; + + public GraphicSource(TypeSource typeSource, RawGridSource rawGridSource, DataSource dataSource) { + this.typeSource = typeSource; + this.rawGridSource = rawGridSource; + this.dataSource = dataSource; + + this.lineGraphicInputFactory = new LineGraphicInputFactory(); + this.nodeGraphicInputFactory = new NodeGraphicInputFactory(); + } + + /** Returns the graphic elements of the grid or throws a {@link SourceException} */ + public GraphicElements getGraphicElements() throws SourceException { + + // read all needed entities + /// start with types and operators + Set operators = typeSource.getOperators(); + Set lineTypes = typeSource.getLineTypes(); + + Set nodes = rawGridSource.getNodes(operators); + Set lines = rawGridSource.getLines(nodes, lineTypes, operators); + + Try, SourceException> nodeGraphics = + Try.of(() -> getNodeGraphicInput(nodes), SourceException.class); + Try, SourceException> lineGraphics = + Try.of(() -> getLineGraphicInput(lines), SourceException.class); + + List exceptions = Try.getExceptions(List.of(nodeGraphics, lineGraphics)); + + if (!exceptions.isEmpty()) { + throw new GraphicSourceException( + exceptions.size() + "error(s) occurred while initializing graphic elements. 
", + exceptions); + } else { + // if everything is fine, return a GraphicElements instance + // getOrThrow should not throw an exception in this context, because all exception are + // filtered and thrown before + return new GraphicElements(nodeGraphics.getOrThrow(), lineGraphics.getOrThrow()); + } + } /** - * Returns a set of {@link NodeGraphicInput} instances. This set has to be unique in the sense of - * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided - * {@link NodeGraphicInput} which has to be checked manually, as {@link - * NodeGraphicInput#equals(Object)} is NOT restricted on the uuid of {@link NodeGraphicInput}. - * - * @return a set of object and uuid unique {@link NodeGraphicInput} entities + * If the set of {@link NodeInput} entities is not exhaustive for all available {@link + * NodeGraphicInput} entities or if an error during the building process occurs a {@link + * SourceException} is thrown, else all entities that has been able to be built are returned. */ - Set getNodeGraphicInput(); + public Set getNodeGraphicInput() throws SourceException { + return getNodeGraphicInput(rawGridSource.getNodes(typeSource.getOperators())); + } + + public Set getNodeGraphicInput(Set nodes) throws SourceException { + return Try.scanCollection( + buildNodeGraphicEntityData(nodes) + .map(nodeGraphicInputFactory::get) + .collect(Collectors.toSet()), + NodeGraphicInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** - * Returns a set of {@link NodeGraphicInput} instances. This set has to be unique in the sense of - * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided - * {@link NodeGraphicInput} which has to be checked manually, as {@link - * NodeGraphicInput#equals(Object)} is NOT restricted on the uuid of {@link NodeGraphicInput}. - * - *

In contrast to {@link #getNodeGraphicInput} this interface provides the ability to pass in - * an already existing set of {@link NodeInput} entities, the {@link NodeGraphicInput} instances - * depend on. Doing so, already loaded nodes can be recycled to improve performance and prevent - * unnecessary loading operations. - * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. - * - * @param nodes a set of object and uuid unique nodes that should be used for the returning - * instances - * @return a set of object and uuid unique {@link NodeGraphicInput} entities + * If the set of {@link LineInput} entities is not exhaustive for all available {@link + * LineGraphicInput} entities or if an error during the building process occurs a {@link + * SourceException} is thrown, else all entities that has been able to be built are returned. */ - Set getNodeGraphicInput(Set nodes); + public Set getLineGraphicInput() throws SourceException { + Set operators = typeSource.getOperators(); + return getLineGraphicInput( + rawGridSource.getLines( + rawGridSource.getNodes(operators), typeSource.getLineTypes(), operators)); + } + + public Set getLineGraphicInput(Set lines) throws SourceException { + return Try.scanCollection( + buildLineGraphicEntityData(lines) + .map(lineGraphicInputFactory::get) + .collect(Collectors.toSet()), + LineGraphicInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + // build EntityData /** - * Returns a set of {@link LineGraphicInput} instances. This set has to be unique in the sense of - * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided - * {@link LineGraphicInput} which has to be checked manually, as {@link - * LineGraphicInput#equals(Object)} is NOT restricted on the uuid of {@link LineGraphicInput}. + * Builds a stream of {@link NodeGraphicInputEntityData} instances that can be consumed by a + * {@link NodeGraphicInputFactory} to build instances of {@link NodeGraphicInput} entities. 
This + * method depends on corresponding instances of {@link NodeInput} entities that are represented by + * a corresponding {@link NodeGraphicInput} entity. The determination of matching {@link + * NodeInput} and {@link NodeGraphicInput} entities is carried out by the UUID of the {@link + * NodeInput} entity. Hence it is crucial to only pass over collections that are pre-checked for + * the uniqueness of the UUIDs of the nodes they contain. No further sanity checks are included in + * this method. If no UUID of a {@link NodeInput} entity can be found for a {@link + * NodeGraphicInputEntityData} instance, a {@link Failure} is included in the stream and warning + * is logged. * - * @return a set of object and uuid unique {@link LineGraphicInput} entities + * @param nodes a set of nodes with unique uuids + * @return a stream of tries of {@link NodeGraphicInput} entities */ - Set getLineGraphicInput(); + protected Stream> buildNodeGraphicEntityData( + Set nodes) { + return dataSource + .getSourceData(NodeGraphicInput.class) + .map(fieldsToAttributes -> buildNodeGraphicEntityData(fieldsToAttributes, nodes)); + } + + protected Try buildNodeGraphicEntityData( + Map fieldsToAttributes, Set nodes) { + + // get the node of the entity + String nodeUuid = fieldsToAttributes.get(NODE); + Optional node = findFirstEntityByUuid(nodeUuid, nodes); + + // if the node is not present we return a failure + // log a warning + if (node.isEmpty()) { + String skippingMessage = + buildSkippingMessage( + NodeGraphicInput.class.getSimpleName(), + fieldsToAttributes.get("uuid"), + "no id (graphic entities don't have one)", + NODE + ": " + nodeUuid); + return new Failure<>(new SourceException("Failure due to: " + skippingMessage)); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(NODE); + + return new Success<>(new NodeGraphicInputEntityData(fieldsToAttributes, node.get())); + } /** - * Returns a set of {@link LineGraphicInput} instances. 
This set has to be unique in the sense of - * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided - * {@link LineGraphicInput} which has to be checked manually, as {@link - * LineGraphicInput#equals(Object)} is NOT restricted on the uuid of {@link LineGraphicInput}. + * Builds a stream of {@link LineGraphicInputEntityData} instances that can be consumed by a + * {@link LineGraphicInputFactory} to build instances of {@link LineGraphicInput} entities. This + * method depends on corresponding instances of {@link LineInput} entities that are represented by + * a corresponding {@link LineGraphicInput} entity. The determination of matching {@link + * LineInput} and {@link LineGraphicInput} entities is carried out by the UUID of the {@link + * LineInput} entity. Hence it is crucial to only pass over collections that are pre-checked for + * the uniqueness of the UUIDs of the nodes they contain. No further sanity checks are included in + * this method. If no UUID of a {@link LineInput} entity can be found for a {@link + * LineGraphicInputEntityData} instance, a {@link Failure} is included in the stream and warning + * is logged. * - *

In contrast to {@link #getLineGraphicInput} this interface provides the ability to pass in - * an already existing set of {@link LineInput} entities, the {@link LineGraphicInput} instances - * depend on. Doing so, already loaded nodes can be recycled to improve performance and prevent - * unnecessary loading operations. - * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. - * - * @param lines a set of object and uuid unique lines that should be used for the returning - * instances - * @return a set of object and uuid unique {@link LineGraphicInput} entities + * @param lines a set of lines with unique uuids + * @return a stream of tries of {@link LineGraphicInput} entities */ - Set getLineGraphicInput(Set lines); + protected Stream> buildLineGraphicEntityData( + Set lines) { + return dataSource + .getSourceData(LineGraphicInput.class) + .map(fieldsToAttributes -> buildLineGraphicEntityData(fieldsToAttributes, lines)); + } + + protected Try buildLineGraphicEntityData( + Map fieldsToAttributes, Set lines) { + + // get the node of the entity + String lineUuid = fieldsToAttributes.get("line"); + Optional line = findFirstEntityByUuid(lineUuid, lines); + + // if the node is not present we return an empty element and + // log a warning + if (line.isEmpty()) { + String skippingMessage = + buildSkippingMessage( + LineGraphicInput.class.getSimpleName(), + fieldsToAttributes.get("uuid"), + "no id (graphic entities don't have one)", + "line: " + lineUuid); + return new Failure<>(new SourceException("Failure due to: " + skippingMessage)); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove("line"); + + return new Success<>(new LineGraphicInputEntityData(fieldsToAttributes, line.get())); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/IdCoordinateSource.java b/src/main/java/edu/ie3/datamodel/io/source/IdCoordinateSource.java index 1940c8b96..a41232c53 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/IdCoordinateSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/IdCoordinateSource.java @@ -8,14 +8,16 @@ import edu.ie3.util.geo.CoordinateDistance; import edu.ie3.util.geo.GeoUtils; import 
java.util.*; +import javax.measure.quantity.Length; import org.locationtech.jts.geom.Point; +import tech.units.indriya.ComparableQuantity; /** * This class serves mapping purposes between the ID of a coordinate and the actual coordinate with * latitude and longitude values, which is especially needed for data source that don't offer * combined primary or foreign keys. */ -public interface IdCoordinateSource extends DataSource { +public interface IdCoordinateSource { /** * Get the matching coordinate for the given ID @@ -49,32 +51,117 @@ public interface IdCoordinateSource extends DataSource { Collection getAllCoordinates(); /** - * Returns the nearest n coordinate points to the given coordinate from a collection of all - * available points + * Returns the nearest n coordinate points. If n is greater than four, this method will try to + * return the corner points of the bounding box. + * + * @param coordinate the coordinate to look up + * @param n number of searched points + * @return the nearest n coordinates or all coordinates if n is less than all available points + */ + List getNearestCoordinates(Point coordinate, int n); + + /** + * Returns the closest n coordinate points to the given coordinate, that are inside a given + * bounding box, from a collection of all available points. The bounding box is calculated with + * the given distance. If n is greater than four, this method will try to return the corner points + * of the bounding box. 
* * @param coordinate the coordinate to look up the nearest neighbours for * @param n how many neighbours to look up - * @return the n nearest coordinates to the given point + * @param distance to the borders of the envelope that contains the coordinates + * @return the nearest n coordinates to the given point */ - default List getNearestCoordinates(Point coordinate, int n) { - return getNearestCoordinates(coordinate, n, getAllCoordinates()); - } + List getClosestCoordinates( + Point coordinate, int n, ComparableQuantity distance); /** - * Returns the nearest n coordinate points to the given coordinate from a given collection of - * points. If the set is empty or null we look through all coordinates. + * Calculates and returns the nearest n coordinate distances to the given coordinate from a given + * collection of points. If the set is empty or null an empty list is returned. If n is greater + * than four, this method will try to return the corner points of the bounding box. * * @param coordinate the coordinate to look up the nearest neighbours for * @param n how many neighbours to look up * @param coordinates the collection of points - * @return the n nearest coordinates to the given point + * @return a list of the nearest n coordinates to the given point or an empty list */ - default List getNearestCoordinates( + default List calculateCoordinateDistances( Point coordinate, int n, Collection coordinates) { - SortedSet sortedDistances = - GeoUtils.calcOrderedCoordinateDistances( - coordinate, - (coordinates != null && !coordinates.isEmpty()) ? coordinates : getAllCoordinates()); - return sortedDistances.stream().limit(n).toList(); + if (coordinates != null && !coordinates.isEmpty()) { + SortedSet sortedDistances = + GeoUtils.calcOrderedCoordinateDistances(coordinate, coordinates); + return restrictToBoundingBox(coordinate, sortedDistances, n); + } else { + return Collections.emptyList(); + } + } + + /** + * Method for evaluating the found points. 
This method tries to return the four corner points of + * the bounding box of the given coordinate. If one of the found points matches the given + * coordinate, only this point is returned. If the given number of searched points is less than + * four, this method will only return the nearest n corner points. If the given number of searched + * points is greater than four, this method will return the four corner points plus the nearest n + * points to match the number of searched points. + * + *

To work properly, the given collection of {@link CoordinateDistance}'s should already be + * sorted by distance. + * + * @param coordinate at the center of the bounding box + * @param distances list of found points with their distances + * @param numberOfPoints that should be returned + * @return list of distances + */ + default List restrictToBoundingBox( + Point coordinate, Collection distances, int numberOfPoints) { + boolean topLeft = false; + boolean topRight = false; + boolean bottomLeft = false; + boolean bottomRight = false; + + List resultingDistances = new ArrayList<>(); + List other = new ArrayList<>(); + + // search for smallest bounding box + for (CoordinateDistance distance : distances) { + Point point = distance.getCoordinateB(); + + // check for bounding box + if (!topLeft && (point.getX() < coordinate.getX() && point.getY() > coordinate.getY())) { + resultingDistances.add(distance); + topLeft = true; + } else if (!topRight + && (point.getX() > coordinate.getX() && point.getY() > coordinate.getY())) { + resultingDistances.add(distance); + topRight = true; + } else if (!bottomLeft + && (point.getX() < coordinate.getX() && point.getY() < coordinate.getY())) { + resultingDistances.add(distance); + bottomLeft = true; + } else if (!bottomRight + && (point.getX() > coordinate.getX() && point.getY() < coordinate.getY())) { + resultingDistances.add(distance); + bottomRight = true; + } else if (coordinate.equalsExact(point, 1e-6)) { + // if current point is matching the given coordinate, we need to return only the current + // point + + resultingDistances.clear(); + resultingDistances.add(distance); + return resultingDistances; + } else { + other.add(distance); + } + } + + // check if n distances are found + int diff = numberOfPoints - resultingDistances.size(); + + if (diff > 0) { + resultingDistances.addAll(other.stream().limit(diff).toList()); + } else if (diff < 0) { + return resultingDistances.stream().limit(numberOfPoints).toList(); + } + + return 
resultingDistances; } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/RawGridSource.java b/src/main/java/edu/ie3/datamodel/io/source/RawGridSource.java index 2910b3178..b06d55d4d 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/RawGridSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/RawGridSource.java @@ -5,9 +5,16 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.exceptions.RawGridException; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.factory.EntityFactory; +import edu.ie3.datamodel.io.factory.input.*; +import edu.ie3.datamodel.models.input.*; import edu.ie3.datamodel.models.input.MeasurementUnitInput; import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.models.input.connector.*; import edu.ie3.datamodel.models.input.connector.LineInput; import edu.ie3.datamodel.models.input.connector.SwitchInput; import edu.ie3.datamodel.models.input.connector.Transformer2WInput; @@ -16,27 +23,61 @@ import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; import edu.ie3.datamodel.models.input.container.RawGridElements; -import java.util.Optional; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; +import java.util.*; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** - * Interface that provides the capability to build entities that are hold by a {@link + * Implementation that provides the capability to build entities that are hold by a {@link * RawGridElements} as well as the {@link RawGridElements} container as well from different data * sources e.g. .csv files or databases. 
* * @version 0.1 * @since 08.04.20 */ -public interface RawGridSource extends DataSource { +public class RawGridSource extends EntitySource { + + // field names + protected static final String NODE_A = "nodeA"; + protected static final String NODE_B = "nodeB"; + protected static final String TYPE = "type"; + + // general fields + private final TypeSource typeSource; + + // factories + private final NodeInputFactory nodeInputFactory; + private final LineInputFactory lineInputFactory; + private final Transformer2WInputFactory transformer2WInputFactory; + private final Transformer3WInputFactory transformer3WInputFactory; + private final SwitchInputFactory switchInputFactory; + private final MeasurementUnitInputFactory measurementUnitInputFactory; + + public RawGridSource(TypeSource typeSource, DataSource dataSource) { + this.typeSource = typeSource; + this.dataSource = dataSource; + + // init factories + this.nodeInputFactory = new NodeInputFactory(); + this.lineInputFactory = new LineInputFactory(); + this.transformer2WInputFactory = new Transformer2WInputFactory(); + this.transformer3WInputFactory = new Transformer3WInputFactory(); + this.switchInputFactory = new SwitchInputFactory(); + this.measurementUnitInputFactory = new MeasurementUnitInputFactory(); + } + /** - * Should return either a consistent instance of {@link RawGridElements} wrapped in {@link - * Optional} or an empty {@link Optional}. The decision to use {@link Optional} instead of - * returning the {@link RawGridElements} instance directly is motivated by the fact, that a {@link + * Should return either a consistent instance of {@link RawGridElements} or throw a {@link + * SourceException}. The decision to throw a {@link SourceException} instead of returning the + * incomplete {@link RawGridElements} instance is motivated by the fact, that a {@link * RawGridElements} is a container instance that depends on several other entities. Without being * complete, it is useless for further processing. * *

Hence, whenever at least one entity {@link RawGridElements} depends on cannot be provided, - * {@link Optional#empty()} should be returned and extensive logging should provide enough + * {@link SourceException} should be thrown. The thrown exception should provide enough * information to debug the error and fix the persistent data that has been failed to processed. * *

Furthermore, it is expected, that the specific implementation of this method ensures not @@ -44,9 +85,53 @@ public interface RawGridSource extends DataSource { * e.g. in the sense that not duplicate UUIDs exist within all entities contained in the returning * instance. * - * @return either a valid, complete {@link RawGridElements} optional or {@link Optional#empty()} + * @return either a valid, complete {@link RawGridElements} or throws a {@link SourceException} */ - Optional getGridData(); + public RawGridElements getGridData() throws SourceException { + /* read all needed entities start with the types and operators */ + Set operators = typeSource.getOperators(); + Set lineTypes = typeSource.getLineTypes(); + Set transformer2WTypeInputs = typeSource.getTransformer2WTypes(); + Set transformer3WTypeInputs = typeSource.getTransformer3WTypes(); + + /* assets */ + Set nodes = getNodes(operators); + Try, SourceException> lineInputs = + Try.of(() -> getLines(nodes, lineTypes, operators), SourceException.class); + Try, SourceException> transformer2WInputs = + Try.of( + () -> get2WTransformers(nodes, transformer2WTypeInputs, operators), + SourceException.class); + Try, SourceException> transformer3WInputs = + Try.of( + () -> get3WTransformers(nodes, transformer3WTypeInputs, operators), + SourceException.class); + Try, SourceException> switches = + Try.of(() -> getSwitches(nodes, operators), SourceException.class); + Try, SourceException> measurementUnits = + Try.of(() -> getMeasurementUnits(nodes, operators), SourceException.class); + + List exceptions = + Try.getExceptions( + List.of( + lineInputs, transformer2WInputs, transformer3WInputs, switches, measurementUnits)); + + if (!exceptions.isEmpty()) { + throw new RawGridException( + exceptions.size() + " error(s) occurred while initializing raw grid. 
", exceptions); + } else { + /* build and return the grid if it is not empty */ + // getOrThrow should not throw an exception in this context, because all exception are + // filtered and thrown before + return new RawGridElements( + nodes, + lineInputs.getOrThrow(), + transformer2WInputs.getOrThrow(), + transformer3WInputs.getOrThrow(), + switches.getOrThrow(), + measurementUnits.getOrThrow()); + } + } /** * Returns a unique set of {@link NodeInput} instances. @@ -57,7 +142,9 @@ public interface RawGridSource extends DataSource { * * @return a set of object and uuid unique {@link NodeInput} entities */ - Set getNodes(); + public Set getNodes() throws SourceException { + return getNodes(typeSource.getOperators()); + } /** * Returns a set of {@link NodeInput} instances. This set has to be unique in the sense of object @@ -65,19 +152,27 @@ public interface RawGridSource extends DataSource { * NodeInput} which has to be checked manually, as {@link NodeInput#equals(Object)} is NOT * restricted on the uuid of {@link NodeInput}. * - *

In contrast to {@link #getNodes} this interface provides the ability to pass in an already + *

In contrast to {@link #getNodes} this method provides the ability to pass in an already * existing set of {@link OperatorInput} entities, the {@link NodeInput} instances depend on. * Doing so, already loaded nodes can be recycled to improve performance and prevent unnecessary * loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances * @return a set of object and uuid unique {@link NodeInput} entities */ - Set getNodes(Set operators); + public Set getNodes(Set operators) throws SourceException { + return Try.scanCollection( + assetInputEntityDataStream(NodeInput.class, operators) + .map(nodeInputFactory::get) + .collect(Collectors.toSet()), + NodeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link LineInput} instances. @@ -88,7 +183,10 @@ public interface RawGridSource extends DataSource { * * @return a set of object and uuid unique {@link LineInput} entities */ - Set getLines(); + public Set getLines() throws SourceException { + Set operators = typeSource.getOperators(); + return getLines(getNodes(operators), typeSource.getLineTypes(), operators); + } /** * Returns a set of {@link LineInput} instances. This set has to be unique in the sense of object @@ -96,13 +194,13 @@ public interface RawGridSource extends DataSource { * LineInput} which has to be checked manually, as {@link LineInput#equals(Object)} is NOT * restricted on the uuid of {@link LineInput}. * - *

In contrast to {@link #getNodes} this interface provides the ability to pass in an already + *

In contrast to {@link #getNodes} this method provides the ability to pass in an already * existing set of {@link NodeInput}, {@link LineTypeInput} and {@link OperatorInput} entities, * the {@link LineInput} instances depend on. Doing so, already loaded nodes, line types and * operators can be recycled to improve performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances @@ -110,8 +208,16 @@ public interface RawGridSource extends DataSource { * @param lineTypeInputs a set of object and uuid unique {@link LineTypeInput} entities * @return a set of object and uuid unique {@link LineInput} entities */ - Set getLines( - Set nodes, Set lineTypeInputs, Set operators); + public Set getLines( + Set nodes, Set lineTypeInputs, Set operators) + throws SourceException { + return Try.scanCollection( + typedEntityStream(LineInput.class, lineInputFactory, nodes, operators, lineTypeInputs) + .collect(Collectors.toSet()), + LineInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link Transformer2WInput} instances. @@ -123,7 +229,10 @@ Set getLines( * * @return a set of object and uuid unique {@link Transformer2WInput} entities */ - Set get2WTransformers(); + public Set get2WTransformers() throws SourceException { + Set operators = typeSource.getOperators(); + return get2WTransformers(getNodes(operators), typeSource.getTransformer2WTypes(), operators); + } /** * Returns a set of {@link Transformer2WInput} instances. This set has to be unique in the sense @@ -131,14 +240,14 @@ Set getLines( * {@link Transformer2WInput} which has to be checked manually, as {@link * Transformer2WInput#equals(Object)} is NOT restricted on the uuid of {@link Transformer2WInput}. * - *

In contrast to {@link #getNodes()} this interface provides the ability to pass in an already + *

In contrast to {@link #getNodes()} this method provides the ability to pass in an already * existing set of {@link NodeInput}, {@link Transformer2WTypeInput} and {@link OperatorInput} * entities, the {@link Transformer2WInput} instances depend on. Doing so, already loaded nodes, * line types and operators can be recycled to improve performance and prevent unnecessary loading * operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances @@ -147,10 +256,23 @@ Set getLines( * entities * @return a set of object and uuid unique {@link Transformer2WInput} entities */ - Set get2WTransformers( + public Set get2WTransformers( Set nodes, Set transformer2WTypes, - Set operators); + Set operators) + throws SourceException { + return Try.scanCollection( + typedEntityStream( + Transformer2WInput.class, + transformer2WInputFactory, + nodes, + operators, + transformer2WTypes) + .collect(Collectors.toSet()), + Transformer2WInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link Transformer3WInput} instances. @@ -162,7 +284,10 @@ Set get2WTransformers( * * @return a set of object and uuid unique {@link Transformer3WInput} entities */ - Set get3WTransformers(); + public Set get3WTransformers() throws SourceException { + Set operators = typeSource.getOperators(); + return get3WTransformers(getNodes(operators), typeSource.getTransformer3WTypes(), operators); + } /** * Returns a set of {@link Transformer3WInput} instances. This set has to be unique in the sense @@ -170,14 +295,14 @@ Set get2WTransformers( * {@link Transformer3WInput} which has to be checked manually, as {@link * Transformer3WInput#equals(Object)} is NOT restricted on the uuid of {@link Transformer3WInput}. * - *

In contrast to {@link #getNodes()} this interface provides the ability to pass in an already + *

In contrast to {@link #getNodes()} this method provides the ability to pass in an already * existing set of {@link NodeInput}, {@link Transformer3WTypeInput} and {@link OperatorInput} * entities, the {@link Transformer3WInput} instances depend on. Doing so, already loaded nodes, * line types and operators can be recycled to improve performance and prevent unnecessary loading * operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances @@ -186,10 +311,18 @@ Set get2WTransformers( * entities * @return a set of object and uuid unique {@link Transformer3WInput} entities */ - Set get3WTransformers( + public Set get3WTransformers( Set nodes, Set transformer3WTypeInputs, - Set operators); + Set operators) + throws SourceException { + return Try.scanCollection( + buildTransformer3WEntities( + transformer3WInputFactory, nodes, transformer3WTypeInputs, operators), + Transformer3WInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link SwitchInput} instances. @@ -201,7 +334,10 @@ Set get3WTransformers( * * @return a set of object and uuid unique {@link SwitchInput} entities */ - Set getSwitches(); + public Set getSwitches() throws SourceException { + Set operators = typeSource.getOperators(); + return getSwitches(getNodes(operators), operators); + } /** * Returns a set of {@link SwitchInput} instances. This set has to be unique in the sense of @@ -209,20 +345,24 @@ Set get3WTransformers( * {@link SwitchInput} which has to be checked manually, as {@link SwitchInput#equals(Object)} is * NOT restricted on the uuid of {@link SwitchInput}. * - *

In contrast to {@link #getNodes()} this interface provides the ability to pass in an already + *

In contrast to {@link #getNodes()} this method provides the ability to pass in an already * existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link SwitchInput} * instances depend on. Doing so, already loaded nodes, line types and operators can be recycled * to improve performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances * @param nodes a set of object and uuid unique {@link NodeInput} entities * @return a set of object and uuid unique {@link SwitchInput} entities */ - Set getSwitches(Set nodes, Set operators); + public Set getSwitches(Set nodes, Set operators) + throws SourceException { + return buildUntypedConnectorInputEntities( + SwitchInput.class, switchInputFactory, nodes, operators); + } /** * Returns a unique set of {@link MeasurementUnitInput} instances. @@ -234,7 +374,10 @@ Set get3WTransformers( * * @return a set of object and uuid unique {@link MeasurementUnitInput} entities */ - Set getMeasurementUnits(); + public Set getMeasurementUnits() throws SourceException { + Set operators = typeSource.getOperators(); + return getMeasurementUnits(getNodes(operators), operators); + } /** * Returns a set of {@link MeasurementUnitInput} instances. This set has to be unique in the sense @@ -243,18 +386,267 @@ Set get3WTransformers( * MeasurementUnitInput#equals(Object)} is NOT restricted on the uuid of {@link * MeasurementUnitInput}. * - *

In contrast to {@link #getNodes()} this interface provides the ability to pass in an already + *

In contrast to {@link #getNodes()} this method provides the ability to pass in an already * existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link * MeasurementUnitInput} instances depend on. Doing so, already loaded nodes, line types and * operators can be recycled to improve performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances * @param nodes a set of object and uuid unique {@link NodeInput} entities * @return a set of object and uuid unique {@link MeasurementUnitInput} entities */ - Set getMeasurementUnits(Set nodes, Set operators); + public Set getMeasurementUnits( + Set nodes, Set operators) throws SourceException { + return Try.scanCollection( + buildNodeAssetEntities( + MeasurementUnitInput.class, measurementUnitInputFactory, nodes, operators), + MeasurementUnitInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + public Set> buildNodeInputEntities( + Class entityClass, + EntityFactory factory, + Collection operators) { + return assetInputEntityDataStream(entityClass, operators) + .map(factory::get) + .collect(Collectors.toSet()); + } + + public Set buildUntypedConnectorInputEntities( + Class entityClass, + EntityFactory factory, + Collection nodes, + Collection operators) + throws SourceException { + return Try.scanCollection( + untypedConnectorInputEntityStream(entityClass, factory, nodes, operators) + .collect(Collectors.toSet()), + entityClass) + .transformF(SourceException::new) + .getOrThrow(); + } + + public Set> buildTransformer3WEntities( + Transformer3WInputFactory transformer3WInputFactory, + Collection nodes, + Collection transformer3WTypeInputs, + Collection operators) { + return buildTransformer3WEntityData( + buildTypedConnectorEntityData( + buildUntypedConnectorInputEntityData( + assetInputEntityDataStream(Transformer3WInput.class, operators), nodes), + transformer3WTypeInputs), + nodes) + .map(transformer3WInputFactory::get) + 
.collect(Collectors.toSet()); + } + + public + Set> buildTypedEntities( + Class entityClass, + EntityFactory> factory, + Collection nodes, + Collection operators, + Collection types) { + return typedEntityStream(entityClass, factory, nodes, operators, types) + .collect(Collectors.toSet()); + } + + /** + * Enriches the given untyped entity data with the equivalent asset type. If this is not possible, + * a {@link Failure} is returned. + * + * @param noTypeConnectorEntityDataStream Stream of untyped entity data + * @param availableTypes Yet available asset types + * @param Type of the asset type + * @return Stream of {@link Try} to enhanced data + */ + protected + Stream, SourceException>> buildTypedConnectorEntityData( + Stream> noTypeConnectorEntityDataStream, + Collection availableTypes) { + return noTypeConnectorEntityDataStream + .parallel() + .map( + noTypeEntityDataOpt -> + noTypeEntityDataOpt.flatMap( + noTypeEntityData -> findAndAddType(noTypeEntityData, availableTypes))); + } + + /** + * Converts a stream of {@link AssetInputEntityData} in connection with a collection of known + * {@link NodeInput}s to a stream of {@link ConnectorInputEntityData}. + * + * @param assetInputEntityDataStream Input stream of {@link AssetInputEntityData} + * @param nodes A collection of known nodes + * @return A stream on {@link Try} to matching {@link ConnectorInputEntityData} + */ + protected Stream> + buildUntypedConnectorInputEntityData( + Stream assetInputEntityDataStream, Collection nodes) { + return assetInputEntityDataStream + .parallel() + .map( + assetInputEntityData -> + buildUntypedConnectorInputEntityData(assetInputEntityData, nodes)); + } + + /** + * Converts a single given {@link AssetInputEntityData} in connection with a collection of known + * {@link NodeInput}s to {@link ConnectorInputEntityData}. If this is not possible, a {@link + * Failure}. 
+ * + * @param assetInputEntityData Input entity data to convert + * @param nodes A collection of known nodes + * @return A {@link Try} to matching {@link ConnectorInputEntityData} + */ + protected Try buildUntypedConnectorInputEntityData( + AssetInputEntityData assetInputEntityData, Collection nodes) { + // get the raw data + Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); + + // get the two connector nodes + String nodeAUuid = fieldsToAttributes.get(NODE_A); + String nodeBUuid = fieldsToAttributes.get(NODE_B); + Optional nodeA = findFirstEntityByUuid(nodeAUuid, nodes); + Optional nodeB = findFirstEntityByUuid(nodeBUuid, nodes); + + // if nodeA or nodeB are not present we return a failure and log a + // warning + if (nodeA.isEmpty() || nodeB.isEmpty()) { + String debugString = + Stream.of( + new AbstractMap.SimpleEntry<>(nodeA, NODE_A + ": " + nodeAUuid), + new AbstractMap.SimpleEntry<>(nodeB, NODE_B + ": " + nodeBUuid)) + .filter(entry -> entry.getKey().isEmpty()) + .map(AbstractMap.SimpleEntry::getValue) + .collect(Collectors.joining("\n")); + + String skippingMessage = + buildSkippingMessage( + assetInputEntityData.getTargetClass().getSimpleName(), + fieldsToAttributes.get("uuid"), + fieldsToAttributes.get("id"), + debugString); + + return new Failure<>(new SourceException("Failure due to: " + skippingMessage)); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().removeAll(new HashSet<>(Arrays.asList(NODE_A, NODE_B))); + + return new Success<>( + new ConnectorInputEntityData( + fieldsToAttributes, + assetInputEntityData.getTargetClass(), + assetInputEntityData.getOperatorInput(), + nodeA.get(), + nodeB.get())); + } + + private + Stream> typedEntityStream( + Class entityClass, + EntityFactory> factory, + Collection nodes, + Collection operators, + Collection types) { + return buildTypedConnectorEntityData( + buildUntypedConnectorInputEntityData( + assetInputEntityDataStream(entityClass, operators), 
nodes), + types) + .map(factory::get); + } + + public + Stream> untypedConnectorInputEntityStream( + Class entityClass, + EntityFactory factory, + Set nodes, + Set operators) { + return buildUntypedConnectorInputEntityData( + assetInputEntityDataStream(entityClass, operators), nodes) + .map(factory::get); + } + + private + Stream> untypedConnectorInputEntityStream( + Class entityClass, + EntityFactory factory, + Collection nodes, + Collection operators) { + return untypedConnectorInputEntityStream( + entityClass, factory, new HashSet<>(nodes), new HashSet<>(operators)); + } + + /** + * Enriches the Stream of tries on {@link Transformer3WInputEntityData} with the information of + * the internal node. + * + * @param typedConnectorEntityDataStream Stream of already typed input entity data + * @param nodes Yet available nodes + * @return A stream of {@link Try} on enriched data + */ + protected Stream> buildTransformer3WEntityData( + Stream, SourceException>> + typedConnectorEntityDataStream, + Collection nodes) { + return typedConnectorEntityDataStream + .parallel() + .map( + typedEntityDataOpt -> + typedEntityDataOpt.flatMap(typeEntityData -> addThirdNode(typeEntityData, nodes))); + } + + /** + * Enriches the third node to the already typed entity data of a three winding transformer. If no + * matching node can be found, return a {@link Failure}. 
+ * + * @param typeEntityData Already typed entity data + * @param nodes Yet available nodes + * @return a {@link Try} to the enriched data + */ + protected Try addThirdNode( + TypedConnectorInputEntityData typeEntityData, + Collection nodes) { + + // get the raw data + Map fieldsToAttributes = typeEntityData.getFieldsToValues(); + + // get nodeC of the transformer + String nodeCUuid = fieldsToAttributes.get("nodeC"); + Optional nodeC = findFirstEntityByUuid(nodeCUuid, nodes); + + // if nodeC is not present we return a failure + // log a warning + if (nodeC.isEmpty()) { + String skippingMessage = + buildSkippingMessage( + typeEntityData.getTargetClass().getSimpleName(), + fieldsToAttributes.get("uuid"), + fieldsToAttributes.get("id"), + "nodeC: " + nodeCUuid); + return new Failure<>(new SourceException("Failure due to: " + skippingMessage)); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove("nodeC"); + + return new Success<>( + new Transformer3WInputEntityData( + fieldsToAttributes, + typeEntityData.getTargetClass(), + typeEntityData.getOperatorInput(), + typeEntityData.getNodeA(), + typeEntityData.getNodeB(), + nodeC.get(), + typeEntityData.getType())); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/ResultEntitySource.java b/src/main/java/edu/ie3/datamodel/io/source/ResultEntitySource.java index 814b0abb6..87d9fcc21 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/ResultEntitySource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/ResultEntitySource.java @@ -5,6 +5,8 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.io.factory.SimpleEntityFactory; +import edu.ie3.datamodel.io.factory.result.*; import edu.ie3.datamodel.models.result.NodeResult; import edu.ie3.datamodel.models.result.ResultEntity; import edu.ie3.datamodel.models.result.connector.LineResult; @@ -14,7 +16,9 @@ import edu.ie3.datamodel.models.result.system.*; import 
edu.ie3.datamodel.models.result.thermal.CylindricalStorageResult; import edu.ie3.datamodel.models.result.thermal.ThermalHouseResult; +import java.util.Optional; import java.util.Set; +import java.util.stream.Collectors; /** * Interface that provides the capability to build entities of type {@link ResultEntity} container @@ -23,7 +27,38 @@ * @version 0.1 * @since 22 June 2021 */ -public interface ResultEntitySource { +public class ResultEntitySource extends EntitySource { + + private final SystemParticipantResultFactory systemParticipantResultFactory; + private final ThermalResultFactory thermalResultFactory; + private final SwitchResultFactory switchResultFactory; + private final NodeResultFactory nodeResultFactory; + private final ConnectorResultFactory connectorResultFactory; + private final FlexOptionsResultFactory flexOptionsResultFactory; + + public ResultEntitySource(DataSource dataSource) { + this.dataSource = dataSource; + + // init factories + this.systemParticipantResultFactory = new SystemParticipantResultFactory(); + this.thermalResultFactory = new ThermalResultFactory(); + this.switchResultFactory = new SwitchResultFactory(); + this.nodeResultFactory = new NodeResultFactory(); + this.connectorResultFactory = new ConnectorResultFactory(); + this.flexOptionsResultFactory = new FlexOptionsResultFactory(); + } + + public ResultEntitySource(DataSource dataSource, String dtfPattern) { + this.dataSource = dataSource; + + // init factories + this.systemParticipantResultFactory = new SystemParticipantResultFactory(dtfPattern); + this.thermalResultFactory = new ThermalResultFactory(); + this.switchResultFactory = new SwitchResultFactory(); + this.nodeResultFactory = new NodeResultFactory(); + this.connectorResultFactory = new ConnectorResultFactory(); + this.flexOptionsResultFactory = new FlexOptionsResultFactory(); + } /** * Returns a unique set of {@link NodeResult} instances. 
@@ -34,7 +69,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link NodeResult} entities */ - Set getNodeResults(); + public Set getNodeResults() { + return getResultEntities(NodeResult.class, nodeResultFactory); + } /** * Returns a unique set of {@link SwitchResult} instances. @@ -46,7 +83,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link SwitchResult} entities */ - Set getSwitchResults(); + public Set getSwitchResults() { + return getResultEntities(SwitchResult.class, switchResultFactory); + } /** * Returns a unique set of {@link LineResult} instances. @@ -57,7 +96,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link LineResult} entities */ - Set getLineResults(); + public Set getLineResults() { + return getResultEntities(LineResult.class, connectorResultFactory); + } /** * Returns a unique set of {@link Transformer2WResult} instances. @@ -69,7 +110,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link Transformer2WResult} entities */ - Set getTransformer2WResultResults(); + public Set getTransformer2WResultResults() { + return getResultEntities(Transformer2WResult.class, connectorResultFactory); + } /** * Returns a unique set of {@link Transformer3WResult} instances. @@ -81,7 +124,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link Transformer3WResult} entities */ - Set getTransformer3WResultResults(); + public Set getTransformer3WResultResults() { + return getResultEntities(Transformer3WResult.class, connectorResultFactory); + } /** * Returns a unique set of {@link FlexOptionsResult} instances. 
@@ -93,7 +138,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link FlexOptionsResult} entities */ - Set getFlexOptionsResults(); + public Set getFlexOptionsResults() { + return getResultEntities(FlexOptionsResult.class, flexOptionsResultFactory); + } /** * Returns a unique set of {@link LoadResult} instances. @@ -104,7 +151,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link LoadResult} entities */ - Set getLoadResults(); + public Set getLoadResults() { + return getResultEntities(LoadResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link PvResult} instances. @@ -115,7 +164,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link PvResult} entities */ - Set getPvResults(); + public Set getPvResults() { + return getResultEntities(PvResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link FixedFeedInResult} instances. @@ -127,7 +178,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link FixedFeedInResult} entities */ - Set getFixedFeedInResults(); + public Set getFixedFeedInResults() { + return getResultEntities(FixedFeedInResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link BmResult} instances. @@ -138,7 +191,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link BmResult} entities */ - Set getBmResults(); + public Set getBmResults() { + return getResultEntities(BmResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link ChpResult} instances. 
@@ -149,7 +204,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link ChpResult} entities */ - Set getChpResults(); + public Set getChpResults() { + return getResultEntities(ChpResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link WecResult} instances. @@ -160,7 +217,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link WecResult} entities */ - Set getWecResults(); + public Set getWecResults() { + return getResultEntities(WecResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link StorageResult} instances. @@ -172,7 +231,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link StorageResult} entities */ - Set getStorageResults(); + public Set getStorageResults() { + return getResultEntities(StorageResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link EvcsResult} instances. @@ -183,7 +244,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link EvcsResult} entities */ - Set getEvcsResults(); + public Set getEvcsResults() { + return getResultEntities(EvcsResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link EvResult} instances. @@ -194,7 +257,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link EvResult} entities */ - Set getEvResults(); + public Set getEvResults() { + return getResultEntities(EvResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link HpResult} instances. @@ -205,7 +270,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link HpResult} entities */ - Set getHpResults(); + public Set getHpResults() { + return getResultEntities(HpResult.class, systemParticipantResultFactory); + } /** * Returns a unique set of {@link CylindricalStorageResult} instances. 
@@ -217,7 +284,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link CylindricalStorageResult} entities */ - Set getCylindricalStorageResult(); + public Set getCylindricalStorageResult() { + return getResultEntities(CylindricalStorageResult.class, thermalResultFactory); + } /** * Returns a unique set of {@link ThermalHouseResult} instances. @@ -229,7 +298,9 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link ThermalHouseResult} entities */ - Set getThermalHouseResults(); + public Set getThermalHouseResults() { + return getResultEntities(ThermalHouseResult.class, thermalResultFactory); + } /** * Returns a unique set of {@link EmResult} instances. @@ -240,5 +311,32 @@ public interface ResultEntitySource { * * @return a set of object and uuid unique {@link EmResult} entities */ - Set getEmResults(); + public Set getEmResults() { + return getResultEntities(EmResult.class, systemParticipantResultFactory); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + private Set getResultEntities( + Class entityClass, SimpleEntityFactory factory) { + return simpleEntityDataStream(entityClass) + .map( + entityData -> + factory + .get(entityData) + .getData() + .flatMap(loadResult -> cast(entityClass, loadResult))) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); + } + + private Optional cast( + Class entityClass, ResultEntity resultEntity) { + if (resultEntity.getClass().equals(entityClass)) { + // safe here as a) type is checked and b) csv data stream already filters non-fitting input + // data + return Optional.of(entityClass.cast(resultEntity)); + } else { + return Optional.empty(); + } + } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/SystemParticipantSource.java b/src/main/java/edu/ie3/datamodel/io/source/SystemParticipantSource.java index 3a742ca49..3754362dd 100644 --- 
a/src/main/java/edu/ie3/datamodel/io/source/SystemParticipantSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/SystemParticipantSource.java @@ -5,6 +5,12 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.exceptions.SystemParticipantsException; +import edu.ie3.datamodel.io.factory.EntityFactory; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; +import edu.ie3.datamodel.io.factory.input.participant.*; import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.OperatorInput; import edu.ie3.datamodel.models.input.container.SystemParticipants; @@ -12,39 +18,164 @@ import edu.ie3.datamodel.models.input.system.type.*; import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput; -import java.util.Optional; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; +import java.util.*; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** - * Interface that provides the capability to build entities of type {@link SystemParticipantInput} - * as well as {@link SystemParticipants} container from .csv files. - * - * @version 0.1 - * @since 08.04.20 + * Implementation that provides the capability to build entities of type {@link + * SystemParticipantInput} as well as {@link SystemParticipants} container. 
*/ -public interface SystemParticipantSource extends DataSource { +public class SystemParticipantSource extends EntitySource { + + private static final String THERMAL_STORAGE = "thermalstorage"; + private static final String THERMAL_BUS = "thermalbus"; + + // general fields + private final TypeSource typeSource; + private final RawGridSource rawGridSource; + private final ThermalSource thermalSource; + + // factories + private final BmInputFactory bmInputFactory; + private final ChpInputFactory chpInputFactory; + private final EvInputFactory evInputFactory; + private final FixedFeedInInputFactory fixedFeedInInputFactory; + private final HpInputFactory hpInputFactory; + private final LoadInputFactory loadInputFactory; + private final PvInputFactory pvInputFactory; + private final StorageInputFactory storageInputFactory; + private final WecInputFactory wecInputFactory; + private final EvcsInputFactory evcsInputFactory; + private final EmInputFactory emInputFactory; + + public SystemParticipantSource( + TypeSource typeSource, + ThermalSource thermalSource, + RawGridSource rawGridSource, + DataSource dataSource) { + + this.typeSource = typeSource; + this.rawGridSource = rawGridSource; + this.thermalSource = thermalSource; + this.dataSource = dataSource; + + // init factories + this.bmInputFactory = new BmInputFactory(); + this.chpInputFactory = new ChpInputFactory(); + this.evInputFactory = new EvInputFactory(); + this.fixedFeedInInputFactory = new FixedFeedInInputFactory(); + this.hpInputFactory = new HpInputFactory(); + this.loadInputFactory = new LoadInputFactory(); + this.pvInputFactory = new PvInputFactory(); + this.storageInputFactory = new StorageInputFactory(); + this.wecInputFactory = new WecInputFactory(); + this.evcsInputFactory = new EvcsInputFactory(); + this.emInputFactory = new EmInputFactory(); + } /** - * Should return either a consistent instance of {@link SystemParticipants} wrapped in {@link - * Optional} or an empty {@link Optional}. 
The decision to use {@link Optional} instead of - * returning the {@link SystemParticipants} instance directly is motivated by the fact, that a - * {@link SystemParticipants} is a container instance that depends on several other entities. - * Without being complete, it is useless for further processing. + * Should return either a consistent instance of {@link SystemParticipants} or throw a {@link + * SourceException}. The decision to throw a {@link SourceException} instead of returning the + * incomplete {@link SystemParticipants} instance is motivated by the fact, that a {@link + * SystemParticipants} is a container instance that depends on several other entities. Without + * being complete, it is useless for further processing. * *

Hence, whenever at least one entity {@link SystemParticipants} depends on cannot be - * provided, {@link Optional#empty()} should be returned and extensive logging should provide - * enough information to debug the error and fix the persistent data that has been failed to - * processed. + * provided, {@link SourceException} should be thrown. The thrown exception should provide enough + * information to debug the error and fix the persistent data that has been failed to processed. * *

Furthermore, it is expected, that the specific implementation of this method ensures not * only the completeness of the resulting {@link SystemParticipants} instance, but also its * validity e.g. in the sense that not duplicate UUIDs exist within all entities contained in the * returning instance. * - * @return either a valid, complete {@link SystemParticipants} optional or {@link - * Optional#empty()} + * @return either a valid, complete {@link SystemParticipants} or throws a {@link SourceException} */ - Optional getSystemParticipants(); + public SystemParticipants getSystemParticipants() throws SourceException { + + // read all needed entities + /// start with types and operators + Set operators = typeSource.getOperators(); + Set bmTypes = typeSource.getBmTypes(); + Set chpTypes = typeSource.getChpTypes(); + Set evTypes = typeSource.getEvTypes(); + Set hpTypes = typeSource.getHpTypes(); + Set storageTypes = typeSource.getStorageTypes(); + Set wecTypes = typeSource.getWecTypes(); + + /// go on with the thermal assets + Set thermalBuses = thermalSource.getThermalBuses(operators); + Set thermalStorages = + thermalSource.getThermalStorages(operators, thermalBuses); + + /// go on with the nodes + Set nodes = rawGridSource.getNodes(operators); + Try, SourceException> fixedFeedInInputs = + Try.of(() -> getFixedFeedIns(nodes, operators), SourceException.class); + Try, SourceException> pvInputs = + Try.of(() -> getPvPlants(nodes, operators), SourceException.class); + Try, SourceException> loads = + Try.of(() -> getLoads(nodes, operators), SourceException.class); + Try, SourceException> bmInputs = + Try.of(() -> getBmPlants(nodes, operators, bmTypes), SourceException.class); + Try, SourceException> storages = + Try.of(() -> getStorages(nodes, operators, storageTypes), SourceException.class); + Try, SourceException> wecInputs = + Try.of(() -> getWecPlants(nodes, operators, wecTypes), SourceException.class); + Try, SourceException> evs = + Try.of(() -> getEvs(nodes, 
operators, evTypes), SourceException.class); + Try, SourceException> evcs = + Try.of(() -> getEvCS(nodes, operators), SourceException.class); + Try, SourceException> chpInputs = + Try.of( + () -> getChpPlants(nodes, operators, chpTypes, thermalBuses, thermalStorages), + SourceException.class); + Try, SourceException> hpInputs = + Try.of(() -> getHeatPumps(nodes, operators, hpTypes, thermalBuses), SourceException.class); + Try, SourceException> emInputs = + Try.of(() -> getEmSystems(nodes, operators), SourceException.class); + + List exceptions = + Try.getExceptions( + List.of( + fixedFeedInInputs, + pvInputs, + loads, + bmInputs, + storages, + wecInputs, + evs, + evcs, + chpInputs, + hpInputs, + emInputs)); + + if (!exceptions.isEmpty()) { + throw new SystemParticipantsException( + exceptions.size() + " error(s) occurred while initializing system participants. ", + exceptions); + } else { + // if everything is fine, return a system participants container + // getOrThrow should not throw an exception in this context, because all exception are + // filtered and thrown before + return new SystemParticipants( + bmInputs.getOrThrow(), + chpInputs.getOrThrow(), + evcs.getOrThrow(), + evs.getOrThrow(), + fixedFeedInInputs.getOrThrow(), + hpInputs.getOrThrow(), + loads.getOrThrow(), + pvInputs.getOrThrow(), + storages.getOrThrow(), + wecInputs.getOrThrow(), + emInputs.getOrThrow()); + } + } /** * Returns a unique set of {@link FixedFeedInInput} instances. @@ -56,7 +187,10 @@ public interface SystemParticipantSource extends DataSource { * * @return a set of object and uuid unique {@link FixedFeedInInput} entities */ - Set getFixedFeedIns(); + public Set getFixedFeedIns() throws SourceException { + Set operators = typeSource.getOperators(); + return getFixedFeedIns(rawGridSource.getNodes(operators), operators); + } /** * Returns a set of {@link FixedFeedInInput} instances. 
This set has to be unique in the sense of @@ -64,20 +198,28 @@ public interface SystemParticipantSource extends DataSource { * {@link FixedFeedInInput} which has to be checked manually, as {@link * FixedFeedInInput#equals(Object)} is NOT restricted on the uuid of {@link FixedFeedInInput}. * - *

In contrast to {@link #getFixedFeedIns()} this interface provides the ability to pass in an + *

In contrast to {@link #getFixedFeedIns()} this method provides the ability to pass in an * already existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link * FixedFeedInInput} instances depend on. Doing so, already loaded nodes can be recycled to * improve performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances * @param nodes a set of object and uuid unique {@link NodeInput} entities * @return a set of object and uuid unique {@link FixedFeedInInput} entities */ - Set getFixedFeedIns(Set nodes, Set operators); + public Set getFixedFeedIns(Set nodes, Set operators) + throws SourceException { + return Try.scanCollection( + buildNodeAssetEntities( + FixedFeedInInput.class, fixedFeedInInputFactory, nodes, operators), + FixedFeedInInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link PvInput} instances. @@ -88,7 +230,10 @@ public interface SystemParticipantSource extends DataSource { * * @return a set of object and uuid unique {@link PvInput} entities */ - Set getPvPlants(); + public Set getPvPlants() throws SourceException { + Set operators = typeSource.getOperators(); + return getPvPlants(rawGridSource.getNodes(operators), operators); + } /** * Returns a set of {@link PvInput} instances. This set has to be unique in the sense of object @@ -96,20 +241,26 @@ public interface SystemParticipantSource extends DataSource { * PvInput} which has to be checked manually, as {@link PvInput#equals(Object)} is NOT restricted * on the uuid of {@link PvInput}. * - *

In contrast to {@link #getPvPlants()} this interface provides the ability to pass in an - * already existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link - * PvInput} instances depend on. Doing so, already loaded nodes can be recycled to improve - * performance and prevent unnecessary loading operations. + *

In contrast to {@link #getPvPlants()} this method provides the ability to pass in an already + * existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link PvInput} + * instances depend on. Doing so, already loaded nodes can be recycled to improve performance and + * prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances * @param nodes a set of object and uuid unique {@link NodeInput} entities * @return a set of object and uuid unique {@link PvInput} entities */ - Set getPvPlants(Set nodes, Set operators); + public Set getPvPlants(Set nodes, Set operators) + throws SourceException { + return Try.scanCollection( + buildNodeAssetEntities(PvInput.class, pvInputFactory, nodes, operators), PvInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link LoadInput} instances. @@ -120,7 +271,10 @@ public interface SystemParticipantSource extends DataSource { * * @return a set of object and uuid unique {@link LoadInput} entities */ - Set getLoads(); + public Set getLoads() throws SourceException { + Set operators = typeSource.getOperators(); + return getLoads(rawGridSource.getNodes(operators), operators); + } /** * Returns a set of {@link LoadInput} instances. This set has to be unique in the sense of object @@ -128,20 +282,27 @@ public interface SystemParticipantSource extends DataSource { * LoadInput} which has to be checked manually, as {@link LoadInput#equals(Object)} is NOT * restricted on the uuid of {@link LoadInput}. * - *

In contrast to {@link #getLoads()} this interface provides the ability to pass in an already + *

In contrast to {@link #getLoads()} this method provides the ability to pass in an already * existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link LoadInput} * instances depend on. Doing so, already loaded nodes can be recycled to improve performance and * prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances * @param nodes a set of object and uuid unique {@link NodeInput} entities * @return a set of object and uuid unique {@link LoadInput} entities */ - Set getLoads(Set nodes, Set operators); + public Set getLoads(Set nodes, Set operators) + throws SourceException { + return Try.scanCollection( + buildNodeAssetEntities(LoadInput.class, loadInputFactory, nodes, operators), + LoadInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link EvcsInput} instances. @@ -152,7 +313,10 @@ public interface SystemParticipantSource extends DataSource { * * @return a set of object and uuid unique {@link EvcsInput} entities */ - Set getEvCS(); + public Set getEvCS() throws SourceException { + Set operators = typeSource.getOperators(); + return getEvCS(rawGridSource.getNodes(operators), operators); + } /** * Returns a set of {@link EvcsInput} instances. This set has to be unique in the sense of object @@ -160,20 +324,27 @@ public interface SystemParticipantSource extends DataSource { * EvcsInput} which has to be checked manually, as {@link EvcsInput#equals(Object)} is NOT * restricted on the uuid of {@link EvcsInput}. * - *

In contrast to {@link #getEvCS()} this interface provides the ability to pass in an already + *

In contrast to {@link #getEvCS()} this method provides the ability to pass in an already * existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link EvcsInput} * instances depend on. Doing so, already loaded nodes can be recycled to improve performance and * prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances * @param nodes a set of object and uuid unique {@link NodeInput} entities * @return a set of object and uuid unique {@link EvcsInput} entities */ - Set getEvCS(Set nodes, Set operators); + public Set getEvCS(Set nodes, Set operators) + throws SourceException { + return Try.scanCollection( + buildNodeAssetEntities(EvcsInput.class, evcsInputFactory, nodes, operators), + EvcsInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link BmInput} instances. @@ -184,7 +355,10 @@ public interface SystemParticipantSource extends DataSource { * * @return a set of object and uuid unique {@link BmInput} entities */ - Set getBmPlants(); + public Set getBmPlants() throws SourceException { + Set operators = typeSource.getOperators(); + return getBmPlants(rawGridSource.getNodes(operators), operators, typeSource.getBmTypes()); + } /** * Returns a set of {@link BmInput} instances. This set has to be unique in the sense of object @@ -192,13 +366,13 @@ public interface SystemParticipantSource extends DataSource { * BmInput} which has to be checked manually, as {@link BmInput#equals(Object)} is NOT restricted * on the uuid of {@link BmInput}. * - *

In contrast to {@link #getBmPlants()} this interface provides the ability to pass in an - * already existing set of {@link NodeInput}, {@link BmTypeInput} and {@link OperatorInput} - * entities, the {@link BmInput} instances depend on. Doing so, already loaded nodes can be - * recycled to improve performance and prevent unnecessary loading operations. + *

In contrast to {@link #getBmPlants()} this method provides the ability to pass in an already + * existing set of {@link NodeInput}, {@link BmTypeInput} and {@link OperatorInput} entities, the + * {@link BmInput} instances depend on. Doing so, already loaded nodes can be recycled to improve + * performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances @@ -206,8 +380,16 @@ public interface SystemParticipantSource extends DataSource { * @param types a set of object and uuid unique {@link BmTypeInput} entities * @return a set of object and uuid unique {@link BmInput} entities */ - Set getBmPlants( - Set nodes, Set operators, Set types); + public Set getBmPlants( + Set nodes, Set operators, Set types) + throws SourceException { + return Try.scanCollection( + buildTypedSystemParticipantEntities( + BmInput.class, bmInputFactory, nodes, operators, types), + BmInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link StorageInput} instances. @@ -219,7 +401,10 @@ Set getBmPlants( * * @return a set of object and uuid unique {@link StorageInput} entities */ - Set getStorages(); + public Set getStorages() throws SourceException { + Set operators = typeSource.getOperators(); + return getStorages(rawGridSource.getNodes(operators), operators, typeSource.getStorageTypes()); + } /** * Returns a set of {@link StorageInput} instances. This set has to be unique in the sense of @@ -227,13 +412,13 @@ Set getBmPlants( * {@link StorageInput} which has to be checked manually, as {@link StorageInput#equals(Object)} * is NOT restricted on the uuid of {@link StorageInput}. * - *

In contrast to {@link #getStorages()} this interface provides the ability to pass in an - * already existing set of {@link NodeInput}, {@link StorageTypeInput} and {@link OperatorInput} - * entities, the {@link StorageInput} instances depend on. Doing so, already loaded nodes can be - * recycled to improve performance and prevent unnecessary loading operations. + *

In contrast to {@link #getStorages()} this method provides the ability to pass in an already + * existing set of {@link NodeInput}, {@link StorageTypeInput} and {@link OperatorInput} entities, + * the {@link StorageInput} instances depend on. Doing so, already loaded nodes can be recycled to + * improve performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances @@ -241,8 +426,16 @@ Set getBmPlants( * @param types a set of object and uuid unique {@link StorageTypeInput} entities * @return a set of object and uuid unique {@link StorageInput} entities */ - Set getStorages( - Set nodes, Set operators, Set types); + public Set getStorages( + Set nodes, Set operators, Set types) + throws SourceException { + return Try.scanCollection( + buildTypedSystemParticipantEntities( + StorageInput.class, storageInputFactory, nodes, operators, types), + StorageInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link WecInput} instances. @@ -253,7 +446,10 @@ Set getStorages( * * @return a set of object and uuid unique {@link WecInput} entities */ - Set getWecPlants(); + public Set getWecPlants() throws SourceException { + Set operators = typeSource.getOperators(); + return getWecPlants(rawGridSource.getNodes(operators), operators, typeSource.getWecTypes()); + } /** * Returns a set of {@link WecInput} instances. This set has to be unique in the sense of object @@ -261,13 +457,13 @@ Set getStorages( * WecInput} which has to be checked manually, as {@link WecInput#equals(Object)} is NOT * restricted on the uuid of {@link WecInput}. * - *

In contrast to {@link #getWecPlants()} this interface provides the ability to pass in an + *

In contrast to {@link #getWecPlants()} this method provides the ability to pass in an * already existing set of {@link NodeInput}, {@link WecTypeInput} and {@link OperatorInput} * entities, the {@link WecInput} instances depend on. Doing so, already loaded nodes can be * recycled to improve performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances @@ -275,8 +471,16 @@ Set getStorages( * @param types a set of object and uuid unique {@link WecTypeInput} entities * @return a set of object and uuid unique {@link WecInput} entities */ - Set getWecPlants( - Set nodes, Set operators, Set types); + public Set getWecPlants( + Set nodes, Set operators, Set types) + throws SourceException { + return Try.scanCollection( + buildTypedSystemParticipantEntities( + WecInput.class, wecInputFactory, nodes, operators, types), + WecInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link EvInput} instances. @@ -287,7 +491,10 @@ Set getWecPlants( * * @return a set of object and uuid unique {@link EvInput} entities */ - Set getEvs(); + public Set getEvs() throws SourceException { + Set operators = typeSource.getOperators(); + return getEvs(rawGridSource.getNodes(operators), operators, typeSource.getEvTypes()); + } /** * Returns a set of {@link EvInput} instances. This set has to be unique in the sense of object @@ -295,13 +502,13 @@ Set getWecPlants( * EvInput} which has to be checked manually, as {@link EvInput#equals(Object)} is NOT restricted * on the uuid of {@link EvInput}. * - *

In contrast to {@link #getEvs()} this interface provides the ability to pass in an already + *

In contrast to {@link #getEvs()} this method provides the ability to pass in an already * existing set of {@link NodeInput}, {@link EvTypeInput} and {@link OperatorInput} entities, the * {@link EvInput} instances depend on. Doing so, already loaded nodes can be recycled to improve * performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances @@ -309,116 +516,459 @@ Set getWecPlants( * @param types a set of object and uuid unique {@link EvTypeInput} entities * @return a set of object and uuid unique {@link EvInput} entities */ - Set getEvs(Set nodes, Set operators, Set types); + public Set getEvs( + Set nodes, Set operators, Set types) + throws SourceException { + return Try.scanCollection( + buildTypedSystemParticipantEntities( + EvInput.class, evInputFactory, nodes, operators, types), + EvInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** - * Returns a unique set of {@link ChpInput} instances. + * Returns a unique set of {@link EmInput} instances. * *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link - * java.util.UUID} uniqueness of the provided {@link ChpInput} which has to be checked manually, - * as {@link ChpInput#equals(Object)} is NOT restricted on the uuid of {@link ChpInput}. + * java.util.UUID} uniqueness of the provided {@link EmInput} which has to be checked manually, as + * {@link EmInput#equals(Object)} is NOT restricted on the uuid of {@link EmInput}. * - * @return a set of object and uuid unique {@link ChpInput} entities + * @return a set of object and uuid unique {@link EmInput} entities */ - Set getChpPlants(); + public Set getEmSystems() throws SourceException { + Set operators = typeSource.getOperators(); + return getEmSystems(rawGridSource.getNodes(operators), operators); + } /** - * Returns a set of {@link ChpInput} instances. This set has to be unique in the sense of object - * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link - * ChpInput} which has to be checked manually, as {@link ChpInput#equals(Object)} is NOT - * restricted on the uuid of {@link ChpInput}. + * This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link EmInput} which has to be checked manually, as + * {@link EmInput#equals(Object)} is NOT restricted on the uuid of {@link EmInput}. * - *

In contrast to {@link #getChpPlants()} this interface provides the ability to pass in an - * already existing set of {@link NodeInput}, {@link ChpTypeInput}, {@link ThermalBusInput}, - * {@link ThermalStorageInput} and {@link OperatorInput} entities, the {@link ChpInput} instances - * depend on. Doing so, already loaded nodes can be recycled to improve performance and prevent - * unnecessary loading operations. + *

In contrast to {@link #getHeatPumps()} this method provides the ability to pass in an + * already existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link + * EmInput} instances depend on. Doing so, already loaded nodes can be recycled to improve + * performance and prevent unnecessary loading operations. * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. + *

If something fails during the creation process a {@link SourceException} is thrown, else a + * set with all entities that has been able to be build is returned. * * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for * the returning instances * @param nodes a set of object and uuid unique {@link NodeInput} entities - * @param types a set of object and uuid unique {@link ChpTypeInput} entities - * @param thermalBuses a set of object and uuid unique {@link ThermalBusInput} entities - * @param thermalStorages a set of object and uuid unique {@link ThermalStorageInput} entities - * @return a set of object and uuid unique {@link ChpInput} entities + * @return a set of object and uuid unique {@link EmInput} entities */ - Set getChpPlants( + public Set getEmSystems(Set nodes, Set operators) + throws SourceException { + return Try.scanCollection( + buildNodeAssetEntities(EmInput.class, emInputFactory, nodes, operators), EmInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } + + public Set getChpPlants() throws SourceException { + Set operators = typeSource.getOperators(); + Set thermalBuses = thermalSource.getThermalBuses(operators); + return getChpPlants( + rawGridSource.getNodes(operators), + operators, + typeSource.getChpTypes(), + thermalBuses, + thermalSource.getThermalStorages(operators, thermalBuses)); + } + + /** + * If one of the sets of {@link NodeInput}, {@link ThermalBusInput}, {@link ThermalStorageInput} + * or {@link ChpTypeInput} entities is not exhaustive for all available {@link ChpInput} entities + * (e.g. a {@link NodeInput} or {@link ChpTypeInput} entity is missing) or if an error during the + * building process occurs a {@link SourceException} is thrown, else all entities that are able to + * be built will be returned. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + public Set getChpPlants( Set nodes, Set operators, Set types, Set thermalBuses, - Set thermalStorages); + Set thermalStorages) + throws SourceException { + return Try.scanCollection( + buildChpInputEntities( + chpInputFactory, nodes, operators, types, thermalBuses, thermalStorages), + ChpInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } - /** - * Returns a unique set of {@link HpInput} instances. - * - *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link - * java.util.UUID} uniqueness of the provided {@link HpInput} which has to be checked manually, as - * {@link HpInput#equals(Object)} is NOT restricted on the uuid of {@link HpInput}. - * - * @return a set of object and uuid unique {@link HpInput} entities - */ - Set getHeatPumps(); + public Set getHeatPumps() throws SourceException { + Set operators = typeSource.getOperators(); + return getHeatPumps( + rawGridSource.getNodes(operators), + operators, + typeSource.getHpTypes(), + thermalSource.getThermalBuses()); + } /** - * Returns a set of {@link HpInput} instances. This set has to be unique in the sense of object - * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link - * HpInput} which has to be checked manually, as {@link HpInput#equals(Object)} is NOT restricted - * on the uuid of {@link HpInput}. - * - *

In contrast to {@link #getHeatPumps()} this interface provides the ability to pass in an - * already existing set of {@link NodeInput}, {@link HpTypeInput}, {@link ThermalBusInput}, {@link - * ThermalStorageInput} and {@link OperatorInput} entities, the {@link HpInput} instances depend - * on. Doing so, already loaded nodes can be recycled to improve performance and prevent - * unnecessary loading operations. - * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. - * - * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for - * the returning instances - * @param nodes a set of object and uuid unique {@link NodeInput} entities - * @param types a set of object and uuid unique {@link HpTypeInput} entities - * @param thermalBuses a set of object and uuid unique {@link ThermalBusInput} entities - * @return a set of object and uuid unique {@link HpInput} entities + * If one of the sets of {@link NodeInput}, {@link ThermalBusInput} or {@link HpTypeInput} + * entities is not exhaustive for all available {@link HpInput} entities (e.g. a {@link NodeInput} + * or {@link HpTypeInput} entity is missing) or if an error during the building process occurs a + * {@link SourceException} is thrown, else all entities that are able to be built will be + * returned. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} */ - Set getHeatPumps( + public Set getHeatPumps( Set nodes, Set operators, Set types, - Set thermalBuses); + Set thermalBuses) + throws SourceException { + return Try.scanCollection( + buildHpInputEntities(hpInputFactory, nodes, operators, types, thermalBuses), + HpInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + private + Set> buildTypedSystemParticipantEntities( + Class entityClass, + EntityFactory> factory, + Collection nodes, + Collection operators, + Collection types) { + return typedSystemParticipantEntityStream(entityClass, factory, nodes, operators, types) + .collect(Collectors.toSet()); + } + + private Set> buildChpInputEntities( + ChpInputFactory factory, + Collection nodes, + Collection operators, + Collection chpTypes, + Collection thermalBuses, + Collection thermalStorages) { + return chpInputStream(factory, nodes, operators, chpTypes, thermalBuses, thermalStorages) + .collect(Collectors.toSet()); + } + + private Set> buildHpInputEntities( + HpInputFactory factory, + Collection nodes, + Collection operators, + Collection types, + Collection thermalBuses) { + return hpInputStream(factory, nodes, operators, types, thermalBuses) + .collect(Collectors.toSet()); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- /** - * Returns a unique set of {@link EmInput} instances. - * - *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link - * java.util.UUID} uniqueness of the provided {@link EmInput} which has to be checked manually, as - * {@link EmInput#equals(Object)} is NOT restricted on the uuid of {@link EmInput}. - * - * @return a set of object and uuid unique {@link EmInput} entities + * Constructs a stream of {@link SystemParticipantInput} entities wrapped in {@link Try}'s. + * + * @param entityClass the class of the entities that should be built + * @param factory the corresponding factory that is capable of building this entities + * @param nodes the nodes that should be considered for these entities + * @param operators the operators that should be considered for these entities + * @param types the types that should be considered for these entities + * @param the type of the resulting entity + * @param the type of the type model of the resulting entity + * @return a stream of tries being either empty or holding an instance of a {@link + * SystemParticipantInput} of the requested entity class */ - Set getEmSystems(); + private + Stream> typedSystemParticipantEntityStream( + Class entityClass, + EntityFactory> factory, + Collection nodes, + Collection operators, + Collection types) { + return buildTypedSystemParticipantEntityData( + nodeAssetInputEntityDataStream( + assetInputEntityDataStream(entityClass, operators), nodes), + types) + .map(factory::get); + } + + private Stream> chpInputStream( + ChpInputFactory factory, + Collection nodes, + Collection operators, + Collection types, + Collection thermalBuses, + Collection thermalStorages) { + return buildChpEntityData( + buildTypedEntityData( + nodeAssetInputEntityDataStream( + assetInputEntityDataStream(ChpInput.class, operators), nodes), + types), + thermalStorages, + thermalBuses) + .map(factory::get); + } + + private Stream> hpInputStream( + HpInputFactory factory, + Collection nodes, + Collection operators, + Collection types, + Collection 
thermalBuses) { + return buildHpEntityData( + buildTypedEntityData( + nodeAssetInputEntityDataStream( + assetInputEntityDataStream(HpInput.class, operators), nodes), + types), + thermalBuses) + .map(factory::get); + } /** - * This set has to be unique in the sense of object uniqueness but also in the sense of {@link - * java.util.UUID} uniqueness of the provided {@link EmInput} which has to be checked manually, as - * {@link EmInput#equals(Object)} is NOT restricted on the uuid of {@link EmInput}. - * - *

In contrast to {@link #getHeatPumps()} this interface provides the ability to pass in an - * already existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link - * EmInput} instances depend on. Doing so, already loaded nodes can be recycled to improve - * performance and prevent unnecessary loading operations. - * - *

If something fails during the creation process it's up to the concrete implementation of an - * empty set or a set with all entities that has been able to be build is returned. - * - * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for - * the returning instances - * @param nodes a set of object and uuid unique {@link NodeInput} entities - * @return a set of object and uuid unique {@link EmInput} entities + * Enriches a given stream of {@link NodeAssetInputEntityData} {@link Try} objects with a type of + * {@link SystemParticipantTypeInput} based on the provided collection of types and the fields to + * values mapping that inside the already provided {@link NodeAssetInputEntityData} instance. + * + * @param nodeAssetEntityDataStream the data stream of {@link NodeAssetInputEntityData} {@link + * Try} objects + * @param types the types that should be used for enrichment and to build {@link + * SystemParticipantTypedEntityData} from + * @param the type of the provided entity types as well as the type parameter of the resulting + * {@link SystemParticipantTypedEntityData} + * @return a stream of tries of {@link SystemParticipantTypedEntityData} instances */ - Set getEmSystems(Set nodes, Set operators); + private + Stream, SourceException>> + buildTypedSystemParticipantEntityData( + Stream> nodeAssetEntityDataStream, + Collection types) { + return nodeAssetEntityDataStream + .parallel() + .map( + nodeAssetInputEntityDataOpt -> + nodeAssetInputEntityDataOpt.flatMap( + nodeAssetInputEntityData -> + buildTypedSystemParticipantEntityData(nodeAssetInputEntityData, types))); + } + + protected + Try, SourceException> + buildTypedSystemParticipantEntityData( + NodeAssetInputEntityData nodeAssetInputEntityData, Collection types) { + return getAssetType( + types, + nodeAssetInputEntityData.getFieldsToValues(), + nodeAssetInputEntityData.getClass().getSimpleName()) + .map( + // if the optional is present, transform and return to the 
data, + // otherwise return an empty optional + assetType -> { + Map fieldsToAttributes = nodeAssetInputEntityData.getFieldsToValues(); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(TYPE); + + return new SystemParticipantTypedEntityData<>( + fieldsToAttributes, + nodeAssetInputEntityData.getTargetClass(), + nodeAssetInputEntityData.getOperatorInput(), + nodeAssetInputEntityData.getNode(), + assetType); + }); + } + + /** + * Enriches a given stream of {@link NodeAssetInputEntityData} tries with a type of {@link + * SystemParticipantTypeInput} based on the provided collection of types and the fields to values + * mapping that inside the already provided {@link NodeAssetInputEntityData} instance. + * + * @param nodeAssetEntityDataStream the data stream of {@link NodeAssetInputEntityData} tries + * @param types the types that should be used for enrichment and to build {@link + * SystemParticipantTypedEntityData} from + * @param the type of the provided entity types as well as the type parameter of the resulting + * {@link SystemParticipantTypedEntityData} + * @return a stream of tries of {@link SystemParticipantTypedEntityData} instances + */ + private + Stream, SourceException>> buildTypedEntityData( + Stream> nodeAssetEntityDataStream, + Collection types) { + return nodeAssetEntityDataStream + .parallel() + .map( + nodeAssetInputEntityDataOpt -> + nodeAssetInputEntityDataOpt.flatMap( + nodeAssetInputEntityData -> + buildTypedEntityData(nodeAssetInputEntityData, types))); + } + + protected + Try, SourceException> buildTypedEntityData( + NodeAssetInputEntityData nodeAssetInputEntityData, Collection types) { + return getAssetType( + types, + nodeAssetInputEntityData.getFieldsToValues(), + nodeAssetInputEntityData.getClass().getSimpleName()) + .map( + // if the optional is present, transform and return to the data, + // otherwise return an empty optional + assetType -> { + Map fieldsToAttributes = 
nodeAssetInputEntityData.getFieldsToValues(); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(TYPE); + + return new SystemParticipantTypedEntityData<>( + fieldsToAttributes, + nodeAssetInputEntityData.getTargetClass(), + nodeAssetInputEntityData.getOperatorInput(), + nodeAssetInputEntityData.getNode(), + assetType); + }); + } + + /** + * Enriches a given stream of {@link SystemParticipantTypedEntityData} tries with a type of {@link + * ThermalBusInput} based on the provided collection of buses and the fields to values mapping + * inside the already provided {@link SystemParticipantTypedEntityData} instance. + * + * @param typedEntityDataStream the data stream of {@link SystemParticipantTypedEntityData} tries + * @param thermalBuses the thermal buses that should be used for enrichment and to build {@link + * HpInputEntityData} + * @return stream of tries of {@link HpInputEntityData} instances + */ + private Stream> buildHpEntityData( + Stream, SourceException>> + typedEntityDataStream, + Collection thermalBuses) { + + return typedEntityDataStream + .parallel() + .map( + typedEntityDataOpt -> + typedEntityDataOpt.flatMap( + typedEntityData -> buildHpEntityData(typedEntityData, thermalBuses))); + } + + protected Try buildHpEntityData( + SystemParticipantTypedEntityData typedEntityData, + Collection thermalBuses) { + // get the raw data + Map fieldsToAttributes = typedEntityData.getFieldsToValues(); + + // get the thermal bus input for this chp unit and try to built the entity data + Optional hpInputEntityDataOpt = + Optional.ofNullable(fieldsToAttributes.get(THERMAL_BUS)) + .flatMap( + thermalBusUuid -> + thermalBuses.stream() + .filter( + storage -> + storage.getUuid().toString().equalsIgnoreCase(thermalBusUuid)) + .findFirst() + .map( + thermalBus -> { + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(THERMAL_BUS); + + return new HpInputEntityData( + 
fieldsToAttributes, + typedEntityData.getOperatorInput(), + typedEntityData.getNode(), + typedEntityData.getTypeInput(), + thermalBus); + })); + + // if the requested entity is not present we return an empty element and + // log a warning + if (hpInputEntityDataOpt.isEmpty()) { + String skippingMessage = + buildSkippingMessage( + typedEntityData.getTargetClass().getSimpleName(), + safeMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), + safeMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), + "thermalBus: " + safeMapGet(fieldsToAttributes, THERMAL_BUS, FIELDS_TO_VALUES_MAP)); + return new Failure<>(new SourceException("Failure due to: " + skippingMessage)); + } + + return new Success<>(hpInputEntityDataOpt.get()); + } + + private Stream> buildChpEntityData( + Stream, SourceException>> + typedEntityDataStream, + Collection thermalStorages, + Collection thermalBuses) { + + return typedEntityDataStream + .parallel() + .map( + typedEntityDataOpt -> + typedEntityDataOpt.flatMap( + typedEntityData -> + buildChpEntityData(typedEntityData, thermalStorages, thermalBuses))); + } + + protected Try buildChpEntityData( + SystemParticipantTypedEntityData typedEntityData, + Collection thermalStorages, + Collection thermalBuses) { + + // get the raw data + Map fieldsToAttributes = typedEntityData.getFieldsToValues(); + + // get the thermal storage input for this chp unit + Optional thermalStorage = + Optional.ofNullable(fieldsToAttributes.get(THERMAL_STORAGE)) + .flatMap( + thermalStorageUuid -> findFirstEntityByUuid(thermalStorageUuid, thermalStorages)); + + // get the thermal bus input for this chp unit + Optional thermalBus = + Optional.ofNullable(fieldsToAttributes.get("thermalBus")) + .flatMap(thermalBusUuid -> findFirstEntityByUuid(thermalBusUuid, thermalBuses)); + + // if the thermal storage or the thermal bus are not present we return an + // empty element and log a warning + if (thermalStorage.isEmpty() || thermalBus.isEmpty()) { + StringBuilder sB = new 
StringBuilder(); + if (thermalStorage.isEmpty()) { + sB.append("thermalStorage: ") + .append(safeMapGet(fieldsToAttributes, THERMAL_STORAGE, FIELDS_TO_VALUES_MAP)); + } + if (thermalBus.isEmpty()) { + sB.append("\nthermalBus: ") + .append(safeMapGet(fieldsToAttributes, THERMAL_BUS, FIELDS_TO_VALUES_MAP)); + } + + String skippingMessage = + buildSkippingMessage( + typedEntityData.getTargetClass().getSimpleName(), + safeMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), + safeMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), + sB.toString()); + return new Failure<>(new SourceException("Failure due to: " + skippingMessage)); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes + .keySet() + .removeAll(new HashSet<>(Arrays.asList("thermalBus", "thermalStorage"))); + + return new Success<>( + new ChpInputEntityData( + fieldsToAttributes, + typedEntityData.getOperatorInput(), + typedEntityData.getNode(), + typedEntityData.getTypeInput(), + thermalBus.get(), + thermalStorage.get())); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/ThermalSource.java b/src/main/java/edu/ie3/datamodel/io/source/ThermalSource.java index dbcabdfdd..48384565f 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/ThermalSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/ThermalSource.java @@ -5,12 +5,20 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.exceptions.FailureException; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.factory.input.*; import edu.ie3.datamodel.models.input.OperatorInput; import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput; import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; import edu.ie3.datamodel.models.input.thermal.ThermalHouseInput; import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput; -import java.util.Set; +import edu.ie3.datamodel.utils.Try; 
+import edu.ie3.datamodel.utils.Try.*; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Interface that provides the capability to build thermal {@link @@ -20,7 +28,23 @@ * @version 0.1 * @since 08.04.20 */ -public interface ThermalSource extends DataSource { +public class ThermalSource extends EntitySource { + // general fields + private final TypeSource typeSource; + + // factories + private final ThermalBusInputFactory thermalBusInputFactory; + private final CylindricalStorageInputFactory cylindricalStorageInputFactory; + private final ThermalHouseInputFactory thermalHouseInputFactory; + + public ThermalSource(TypeSource typeSource, DataSource dataSource) { + this.typeSource = typeSource; + this.dataSource = dataSource; + + this.thermalBusInputFactory = new ThermalBusInputFactory(); + this.cylindricalStorageInputFactory = new CylindricalStorageInputFactory(); + this.thermalHouseInputFactory = new ThermalHouseInputFactory(); + } /** * Returns a unique set of {@link ThermalBusInput} instances. @@ -32,7 +56,9 @@ public interface ThermalSource extends DataSource { * * @return a set of object and uuid unique {@link ThermalBusInput} entities */ - Set getThermalBuses(); + public Set getThermalBuses() throws SourceException { + return getThermalBuses(typeSource.getOperators()); + } /** * Returns a set of {@link ThermalBusInput} instances. 
This set has to be unique in the sense of @@ -52,7 +78,13 @@ public interface ThermalSource extends DataSource { * the returning instances * @return a set of object and uuid unique {@link ThermalBusInput} entities */ - Set getThermalBuses(Set operators); + public Set getThermalBuses(Set operators) throws SourceException { + return Try.scanCollection( + buildAssetInputEntities(ThermalBusInput.class, thermalBusInputFactory, operators), + ThermalBusInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of instances of all entities implementing the {@link ThermalStorageInput} @@ -65,7 +97,9 @@ public interface ThermalSource extends DataSource { * * @return a set of object and uuid unique {@link ThermalStorageInput} entities */ - Set getThermalStorages(); + public Set getThermalStorages() throws SourceException { + return new HashSet<>(getCylindricStorages()); + } /** * Returns a unique set of instances of all entities implementing the {@link ThermalStorageInput} @@ -88,8 +122,10 @@ public interface ThermalSource extends DataSource { * for the returning instances * @return a set of object and uuid unique {@link ThermalStorageInput} entities */ - Set getThermalStorages( - Set operators, Set thermalBuses); + public Set getThermalStorages( + Set operators, Set thermalBuses) throws SourceException { + return new HashSet<>(getCylindricStorages(operators, thermalBuses)); + } /** * Returns a unique set of {@link ThermalHouseInput} instances. @@ -101,7 +137,11 @@ Set getThermalStorages( * * @return a set of object and uuid unique {@link ThermalHouseInput} entities */ - Set getThermalHouses(); + public Set getThermalHouses() throws SourceException { + return buildThermalHouseInputEntities(thermalHouseInputFactory) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link ThermalHouseInput} instances. 
This set has to be unique in the sense of @@ -123,8 +163,12 @@ Set getThermalStorages( * for the returning instances * @return a set of object and uuid unique {@link ThermalHouseInput} entities */ - Set getThermalHouses( - Set operators, Set thermalBuses); + public Set getThermalHouses( + Set operators, Set thermalBuses) throws SourceException { + return buildThermalHouseInputEntities(thermalHouseInputFactory, operators, thermalBuses) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a unique set of {@link CylindricalStorageInput} instances. @@ -136,7 +180,11 @@ Set getThermalHouses( * * @return a set of object and uuid unique {@link CylindricalStorageInput} entities */ - Set getCylindricStorages(); + public Set getCylindricStorages() throws SourceException { + return buildCylindricalStorageInputEntities(cylindricalStorageInputFactory) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link CylindricalStorageInput} instances. This set has to be unique in the @@ -159,6 +207,107 @@ Set getThermalHouses( * for the returning instances * @return a set of object and uuid unique {@link CylindricalStorageInput} entities */ - Set getCylindricStorages( - Set operators, Set thermalBuses); + public Set getCylindricStorages( + Set operators, Set thermalBuses) throws SourceException { + return Try.scanCollection( + buildCylindricalStorageInputEntities( + cylindricalStorageInputFactory, operators, thermalBuses), + CylindricalStorageInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + protected Stream> + buildThermalUnitInputEntityData( + AssetInputEntityData assetInputEntityData, Collection thermalBuses) { + + // get the raw data + Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); + + // get the thermal bus input for this chp unit + String thermalBusUuid = 
fieldsToAttributes.get("thermalbus"); + Optional thermalBus = + thermalBuses.stream() + .filter(storage -> storage.getUuid().toString().equalsIgnoreCase(thermalBusUuid)) + .findFirst(); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().removeAll(new HashSet<>(Collections.singletonList("thermalbus"))); + + // if the type is not present we return an empty element and + // log a warning + if (thermalBus.isEmpty()) { + String skippingMessage = + buildSkippingMessage( + assetInputEntityData.getTargetClass().getSimpleName(), + fieldsToAttributes.get("uuid"), + fieldsToAttributes.get("id"), + "thermalBus: " + thermalBusUuid); + return Stream.of(new Failure<>(new SourceException("Failure due to: " + skippingMessage))); + } + + return Stream.of( + new Success<>( + new ThermalUnitInputEntityData( + assetInputEntityData.getFieldsToValues(), + assetInputEntityData.getTargetClass(), + assetInputEntityData.getOperatorInput(), + thermalBus.get()))); + } + + public Try, FailureException> buildThermalHouseInputEntities( + ThermalHouseInputFactory factory) throws SourceException { + Set thermalBuses = getThermalBuses(); + + return Try.scanCollection( + assetInputEntityDataStream(ThermalHouseInput.class, typeSource.getOperators()) + .flatMap( + assetInputEntityData -> + buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses) + .map(factory::get)) + .collect(Collectors.toSet()), + ThermalHouseInput.class); + } + + public Try, FailureException> buildThermalHouseInputEntities( + ThermalHouseInputFactory factory, + Collection operators, + Collection thermalBuses) { + return Try.scanCollection( + assetInputEntityDataStream(ThermalHouseInput.class, operators) + .flatMap( + assetInputEntityData -> + buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses) + .map(factory::get)) + .collect(Collectors.toSet()), + ThermalHouseInput.class); + } + + public Try, FailureException> buildCylindricalStorageInputEntities( + 
CylindricalStorageInputFactory factory) throws SourceException { + Set thermalBuses = getThermalBuses(); + + return Try.scanCollection( + assetInputEntityDataStream(CylindricalStorageInput.class, typeSource.getOperators()) + .flatMap( + assetInputEntityData -> + buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses) + .map(factory::get)) + .collect(Collectors.toSet()), + CylindricalStorageInput.class); + } + + public Set> buildCylindricalStorageInputEntities( + CylindricalStorageInputFactory factory, + Collection operators, + Collection thermalBuses) { + return assetInputEntityDataStream(CylindricalStorageInput.class, operators) + .flatMap( + assetInputEntityData -> + buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses) + .map(factory::get)) + .collect(Collectors.toSet()); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java index 05f905728..09d4530f9 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java @@ -5,23 +5,44 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.io.factory.SimpleEntityData; +import edu.ie3.datamodel.io.factory.timeseries.TimeSeriesMappingFactory; import edu.ie3.datamodel.models.input.InputEntity; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * This interface describes basic function to handle mapping between models and their respective * time series */ -public interface TimeSeriesMappingSource extends DataSource { +public abstract class TimeSeriesMappingSource { + + private final TimeSeriesMappingFactory mappingFactory; + + 
protected TimeSeriesMappingSource() { + this.mappingFactory = new TimeSeriesMappingFactory(); + } + /** * Get a mapping from model {@link UUID} to the time series {@link UUID} * * @return That mapping */ - Map getMapping(); + public Map getMapping() { + return getMappingSourceData() + .map(this::createMappingEntry) + .filter(Try::isSuccess) + .map(t -> (Success) t) + .map(Success::get) + .collect(Collectors.toMap(MappingEntry::getParticipant, MappingEntry::getTimeSeries)); + } /** * Get a time series identifier to a given model identifier @@ -29,24 +50,29 @@ public interface TimeSeriesMappingSource extends DataSource { * @param modelIdentifier Identifier of the model * @return An {@link Optional} to the time series identifier */ - default Optional getTimeSeriesUuid(UUID modelIdentifier) { + public Optional getTimeSeriesUuid(UUID modelIdentifier) { return Optional.ofNullable(getMapping().get(modelIdentifier)); } /** - * Get an option on the given time series meta information + * Extract a stream of maps from the database for the mapping * - * @param timeSeriesUuid Unique identifier of the time series in question - * @return An Option onto the meta information - * @deprecated since 3.0. 
Use {@link - * TimeSeriesMetaInformationSource#getTimeSeriesMetaInformation()} instead + * @return Stream of maps */ - @Deprecated(since = "3.0", forRemoval = true) - Optional - getTimeSeriesMetaInformation(UUID timeSeriesUuid); + public abstract Stream> getMappingSourceData(); + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + private Try createMappingEntry( + Map fieldToValues) { + SimpleEntityData entityData = new SimpleEntityData(fieldToValues, MappingEntry.class); + return mappingFactory.get(entityData); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- /** Class to represent one entry within the participant to time series mapping */ - class MappingEntry extends InputEntity { + public static class MappingEntry extends InputEntity { private final UUID participant; private final UUID timeSeries; diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java index b56221715..d97c032ba 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java @@ -12,7 +12,7 @@ import java.util.UUID; /** Source for all available time series with their {@link UUID} and {@link ColumnScheme} */ -public interface TimeSeriesMetaInformationSource extends DataSource { +public interface TimeSeriesMetaInformationSource { /** * Get a mapping from time series {@link UUID} to its meta information {@link diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesSource.java index 34b8c676a..dd863ba51 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesSource.java @@ -5,61 +5,51 @@ */ 
package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.factory.timeseries.SimpleTimeBasedValueData; +import edu.ie3.datamodel.io.factory.timeseries.TimeBasedSimpleValueFactory; import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; +import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue; import edu.ie3.datamodel.models.value.Value; +import edu.ie3.datamodel.utils.Try; import edu.ie3.util.interval.ClosedInterval; import java.time.ZonedDateTime; -import java.util.EnumSet; +import java.util.*; import java.util.Optional; /** * The interface definition of a source, that is able to provide one specific time series for one * model */ -public interface TimeSeriesSource extends DataSource { +public abstract class TimeSeriesSource { - /** - * Checks whether the given column scheme can be used with time series. - * - * @param scheme the column scheme to check - * @return whether the scheme is accepted or not - * @deprecated since 3.0. Use {@link - * TimeSeriesUtils#isSchemeAccepted(edu.ie3.datamodel.io.naming.timeseries.ColumnScheme)} - * instead. 
- */ - @Deprecated(since = "3.0", forRemoval = true) - static boolean isSchemeAccepted(edu.ie3.datamodel.io.csv.timeseries.ColumnScheme scheme) { - return EnumSet.of( - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme.ACTIVE_POWER, - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme.APPARENT_POWER, - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme.ENERGY_PRICE, - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND, - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND, - edu.ie3.datamodel.io.csv.timeseries.ColumnScheme.HEAT_DEMAND) - .contains(scheme); + protected Class valueClass; + protected TimeBasedSimpleValueFactory valueFactory; + + protected TimeSeriesSource(Class valueClass, TimeBasedSimpleValueFactory factory) { + this.valueFactory = factory; + this.valueClass = valueClass; } /** - * Obtain the full time series + * Build a {@link TimeBasedValue} of type {@code V}, whereas the underlying {@link Value} does not + * need any additional information. * - * @return the time series + * @param fieldToValues Mapping from field id to values + * @return {@link Try} of simple time based value */ - IndividualTimeSeries getTimeSeries(); + protected Try, FactoryException> createTimeBasedValue( + Map fieldToValues) { + SimpleTimeBasedValueData factoryData = + new SimpleTimeBasedValueData<>(fieldToValues, valueClass); + return valueFactory.get(factoryData); + } - /** - * Get the time series for the given time interval. If the interval is bigger than the time series - * itself, only the parts of the time series within the interval are handed back. 
- * - * @param timeInterval Desired time interval to cover - * @return The parts of of interest of the time series - */ - IndividualTimeSeries getTimeSeries(ClosedInterval timeInterval); + public abstract IndividualTimeSeries getTimeSeries(); - /** - * Get the time series value for a specific time - * - * @param time The queried time - * @return Option on a value for that time - */ - Optional getValue(ZonedDateTime time); + public abstract IndividualTimeSeries getTimeSeries(ClosedInterval timeInterval) + throws SourceException; + + public abstract Optional getValue(ZonedDateTime time) throws SourceException; } diff --git a/src/main/java/edu/ie3/datamodel/io/source/TypeSource.java b/src/main/java/edu/ie3/datamodel/io/source/TypeSource.java index aa1833952..a7412cedc 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/TypeSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/TypeSource.java @@ -5,11 +5,18 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.factory.input.OperatorInputFactory; +import edu.ie3.datamodel.io.factory.typeinput.LineTypeInputFactory; +import edu.ie3.datamodel.io.factory.typeinput.SystemParticipantTypeInputFactory; +import edu.ie3.datamodel.io.factory.typeinput.Transformer2WTypeInputFactory; +import edu.ie3.datamodel.io.factory.typeinput.Transformer3WTypeInputFactory; import edu.ie3.datamodel.models.input.OperatorInput; import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; import edu.ie3.datamodel.models.input.system.type.*; +import edu.ie3.datamodel.utils.Try; import java.util.Set; /** @@ -20,7 +27,23 @@ * @version 0.1 * @since 08.04.20 */ -public interface TypeSource extends DataSource { +public class TypeSource extends EntitySource { + // factories + private final OperatorInputFactory 
operatorInputFactory; + private final Transformer2WTypeInputFactory transformer2WTypeInputFactory; + private final LineTypeInputFactory lineTypeInputFactory; + private final Transformer3WTypeInputFactory transformer3WTypeInputFactory; + private final SystemParticipantTypeInputFactory systemParticipantTypeInputFactory; + + public TypeSource(DataSource dataSource) { + this.dataSource = dataSource; + + this.operatorInputFactory = new OperatorInputFactory(); + this.transformer2WTypeInputFactory = new Transformer2WTypeInputFactory(); + this.lineTypeInputFactory = new LineTypeInputFactory(); + this.transformer3WTypeInputFactory = new Transformer3WTypeInputFactory(); + this.systemParticipantTypeInputFactory = new SystemParticipantTypeInputFactory(); + } /** * Returns a set of {@link Transformer2WTypeInput} instances. This set has to be unique in the @@ -31,7 +54,13 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link Transformer2WTypeInput} entities */ - Set getTransformer2WTypes(); + public Set getTransformer2WTypes() throws SourceException { + return Try.scanCollection( + buildEntities(Transformer2WTypeInput.class, transformer2WTypeInputFactory), + Transformer2WTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link OperatorInput} instances. This set has to be unique in the sense of @@ -41,7 +70,12 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link OperatorInput} entities */ - Set getOperators(); + public Set getOperators() throws SourceException { + return Try.scanCollection( + buildEntities(OperatorInput.class, operatorInputFactory), OperatorInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link LineTypeInput} instances. 
This set has to be unique in the sense of @@ -51,7 +85,12 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link LineTypeInput} entities */ - Set getLineTypes(); + public Set getLineTypes() throws SourceException { + return Try.scanCollection( + buildEntities(LineTypeInput.class, lineTypeInputFactory), LineTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link Transformer3WTypeInput} instances. This set has to be unique in the @@ -62,7 +101,13 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link Transformer3WTypeInput} entities */ - Set getTransformer3WTypes(); + public Set getTransformer3WTypes() throws SourceException { + return Try.scanCollection( + buildEntities(Transformer3WTypeInput.class, transformer3WTypeInputFactory), + Transformer3WTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link BmTypeInput} instances. This set has to be unique in the sense of @@ -72,7 +117,12 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link BmTypeInput} entities */ - Set getBmTypes(); + public Set getBmTypes() throws SourceException { + return Try.scanCollection( + buildEntities(BmTypeInput.class, systemParticipantTypeInputFactory), BmTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link ChpTypeInput} instances. 
This set has to be unique in the sense of @@ -82,7 +132,13 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link ChpTypeInput} entities */ - Set getChpTypes(); + public Set getChpTypes() throws SourceException { + return Try.scanCollection( + buildEntities(ChpTypeInput.class, systemParticipantTypeInputFactory), + ChpTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link HpTypeInput} instances. This set has to be unique in the sense of @@ -92,7 +148,12 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link HpTypeInput} entities */ - Set getHpTypes(); + public Set getHpTypes() throws SourceException { + return Try.scanCollection( + buildEntities(HpTypeInput.class, systemParticipantTypeInputFactory), HpTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link StorageTypeInput} instances. This set has to be unique in the sense of @@ -102,7 +163,13 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link StorageTypeInput} entities */ - Set getStorageTypes(); + public Set getStorageTypes() throws SourceException { + return Try.scanCollection( + buildEntities(StorageTypeInput.class, systemParticipantTypeInputFactory), + StorageTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link WecTypeInput} instances. 
This set has to be unique in the sense of @@ -112,7 +179,13 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link WecTypeInput} entities */ - Set getWecTypes(); + public Set getWecTypes() throws SourceException { + return Try.scanCollection( + buildEntities(WecTypeInput.class, systemParticipantTypeInputFactory), + WecTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } /** * Returns a set of {@link EvTypeInput} instances. This set has to be unique in the sense of @@ -122,5 +195,10 @@ public interface TypeSource extends DataSource { * * @return a set of object and uuid unique {@link EvTypeInput} entities */ - Set getEvTypes(); + public Set getEvTypes() throws SourceException { + return Try.scanCollection( + buildEntities(EvTypeInput.class, systemParticipantTypeInputFactory), EvTypeInput.class) + .transformF(SourceException::new) + .getOrThrow(); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/WeatherSource.java b/src/main/java/edu/ie3/datamodel/io/source/WeatherSource.java index 89deca8d9..31f5150f0 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/WeatherSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/WeatherSource.java @@ -5,44 +5,121 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.factory.timeseries.TimeBasedWeatherValueData; +import edu.ie3.datamodel.io.factory.timeseries.TimeBasedWeatherValueFactory; import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue; import edu.ie3.datamodel.models.value.WeatherValue; +import edu.ie3.datamodel.utils.Try; import edu.ie3.util.interval.ClosedInterval; import java.time.ZonedDateTime; -import java.util.Collection; -import java.util.Map; -import java.util.Optional; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; import 
org.locationtech.jts.geom.Point; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** Describes a data source for weather data */ -public interface WeatherSource extends DataSource { +/** Abstract class for WeatherSource by Csv and Sql Data */ +public abstract class WeatherSource { + + protected static final Logger log = LoggerFactory.getLogger(WeatherSource.class); + + protected TimeBasedWeatherValueFactory weatherFactory; + + protected Map> coordinateToTimeSeries; + + protected IdCoordinateSource idCoordinateSource; + + protected static final String COORDINATE_ID = "coordinateid"; + + protected WeatherSource( + IdCoordinateSource idCoordinateSource, TimeBasedWeatherValueFactory weatherFactory) { + this.idCoordinateSource = idCoordinateSource; + this.weatherFactory = weatherFactory; + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + public abstract Map> getWeather( + ClosedInterval timeInterval) throws SourceException; + + public abstract Map> getWeather( + ClosedInterval timeInterval, Collection coordinates) + throws SourceException; + + public abstract Optional> getWeather( + ZonedDateTime date, Point coordinate) throws SourceException; + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- /** - * Return the weather for the given time interval + * Converts a field to value map into TimeBasedWeatherValueData, extracts the coordinate id from + * the field map and uses the {@link IdCoordinateSource} to map it to a point * - * @param timeInterval Queried time interval - * @return weather data for the specified time range, sorted by coordinate + * @param fieldMap the field to value map for one TimeBasedValue + * @return the TimeBasedWeatherValueData */ - Map> getWeather( - ClosedInterval timeInterval); + protected Optional toTimeBasedWeatherValueData( + Map fieldMap) { + String coordinateValue = fieldMap.remove(COORDINATE_ID); + fieldMap.putIfAbsent("uuid", 
UUID.randomUUID().toString()); + int coordinateId = Integer.parseInt(coordinateValue); + Optional coordinate = idCoordinateSource.getCoordinate(coordinateId); + if (coordinate.isEmpty()) { + log.warn("Unable to match coordinate ID {} to a point", coordinateId); + return Optional.empty(); + } + return Optional.of(new TimeBasedWeatherValueData(fieldMap, coordinate.get())); + } /** - * Return the weather for the given time interval AND coordinates + * Maps a collection of TimeBasedValues into time series for each contained coordinate point * - * @param timeInterval Queried time interval - * @param coordinates Queried coordinates - * @return weather data for the specified time range and coordinates, sorted by coordinate + * @param timeBasedValues the values to map + * @return a map of coordinate point to time series */ - Map> getWeather( - ClosedInterval timeInterval, Collection coordinates); + protected Map> mapWeatherValuesToPoints( + Collection> timeBasedValues) { + Map>> coordinateToValues = + timeBasedValues.stream() + .collect( + Collectors.groupingBy( + timeBasedWeatherValue -> timeBasedWeatherValue.getValue().getCoordinate(), + Collectors.toSet())); + Map> coordinateToTimeSeriesMap = new HashMap<>(); + for (Map.Entry>> entry : + coordinateToValues.entrySet()) { + Set> values = entry.getValue(); + IndividualTimeSeries timeSeries = new IndividualTimeSeries<>(null, values); + coordinateToTimeSeriesMap.put(entry.getKey(), timeSeries); + } + return coordinateToTimeSeriesMap; + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- /** - * Return the weather for the given time date AND coordinate + * Converts a stream of fields to value map into a TimeBasedValue, removes the "tid" * - * @param date Queried date time - * @param coordinate Queried coordinate - * @return weather data for the specified time and coordinate + * @param factory TimeBasedWeatherValueFactory + * @param inputStream stream of 
fields to convert into TimeBasedValues + * @return a list of that TimeBasedValues */ - Optional> getWeather(ZonedDateTime date, Point coordinate); + public List> buildTimeBasedValues( + TimeBasedWeatherValueFactory factory, Stream> inputStream) + throws SourceException { + return Try.scanStream( + inputStream.map( + fieldsToAttributes -> { + fieldsToAttributes.remove("tid"); + Optional data = + toTimeBasedWeatherValueData(fieldsToAttributes); + return factory.get(data.get()); + }), + "TimeBasedValue") + .transform(Stream::toList, SourceException::new) + .getOrThrow(); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/WholesalePriceSource.java b/src/main/java/edu/ie3/datamodel/io/source/WholesalePriceSource.java index 6ebc09176..df0563a17 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/WholesalePriceSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/WholesalePriceSource.java @@ -11,7 +11,7 @@ import java.time.ZonedDateTime; /** Describes a data source for wholesale prices */ -public interface WholesalePriceSource extends DataSource { +public interface WholesalePriceSource { /** * Return the whole sale price for the given time interval * diff --git a/src/main/java/edu/ie3/datamodel/io/source/couchbase/CouchbaseWeatherSource.java b/src/main/java/edu/ie3/datamodel/io/source/couchbase/CouchbaseWeatherSource.java index 128999f11..abfaca204 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/couchbase/CouchbaseWeatherSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/couchbase/CouchbaseWeatherSource.java @@ -30,49 +30,17 @@ import org.slf4j.LoggerFactory; /** Couchbase Source for weather data */ -public class CouchbaseWeatherSource implements WeatherSource { +public class CouchbaseWeatherSource extends WeatherSource { private static final Logger logger = LoggerFactory.getLogger(CouchbaseWeatherSource.class); private static final String DEFAULT_TIMESTAMP_PATTERN = "yyyy-MM-dd'T'HH:mm:ssxxx"; /** The start of the document key, 
comparable to a table name in relational databases */ private static final String DEFAULT_KEY_PREFIX = "weather"; - private final TimeBasedWeatherValueFactory weatherFactory; - private final String keyPrefix; private final CouchbaseConnector connector; - private final IdCoordinateSource coordinateSource; private final String coordinateIdColumnName; private final String timeStampPattern; - /** - * Instantiate a weather source utilising a connection to a couchbase instance obtained via the - * connector. This convenient constructor uses {@link CouchbaseWeatherSource#DEFAULT_KEY_PREFIX} - * as key prefix and {@link CouchbaseWeatherSource#DEFAULT_TIMESTAMP_PATTERN} as timestamp - * pattern. - * - * @param connector Connector, that establishes the connection to the couchbase instance - * @param coordinateSource Source to obtain actual coordinates from - * @param coordinateIdColumnName Name of the column containing the information about the - * coordinate identifier - * @param weatherFactory Factory to transfer field to value mapping into actual java object - * instances - * @deprecated Use {@link CouchbaseWeatherSource#CouchbaseWeatherSource(CouchbaseConnector, - * IdCoordinateSource, String, TimeBasedWeatherValueFactory, String)} instead - */ - @Deprecated(since = "3.0", forRemoval = true) - public CouchbaseWeatherSource( - CouchbaseConnector connector, - IdCoordinateSource coordinateSource, - String coordinateIdColumnName, - TimeBasedWeatherValueFactory weatherFactory) { - this( - connector, - coordinateSource, - coordinateIdColumnName, - weatherFactory, - DEFAULT_TIMESTAMP_PATTERN); - } - /** * Instantiate a weather source utilising a connection to a couchbase instance obtained via the * connector. This convenient constructor uses {@link CouchbaseWeatherSource#DEFAULT_KEY_PREFIX} @@ -101,41 +69,12 @@ public CouchbaseWeatherSource( timeStampPattern); } - /** - * Instantiate a weather source utilising a connection to a couchbase instance obtained via the - * connector. 
This convenient constructor uses {@link - * CouchbaseWeatherSource#DEFAULT_TIMESTAMP_PATTERN} as timestamp pattern. - * - * @param connector Connector, that establishes the connection to the couchbase instance - * @param coordinateSource Source to obtain actual coordinates from - * @param keyPrefix Prefix of entries, that belong to weather - * @param weatherFactory Factory to transfer field to value mapping into actual java object - * instances - * @deprecated Use {@link CouchbaseWeatherSource#CouchbaseWeatherSource(CouchbaseConnector, - * IdCoordinateSource, String, String, TimeBasedWeatherValueFactory, String)} instead - */ - @Deprecated(since = "3.0", forRemoval = true) - public CouchbaseWeatherSource( - CouchbaseConnector connector, - IdCoordinateSource coordinateSource, - String coordinateIdColumnName, - String keyPrefix, - TimeBasedWeatherValueFactory weatherFactory) { - this( - connector, - coordinateSource, - coordinateIdColumnName, - keyPrefix, - weatherFactory, - DEFAULT_TIMESTAMP_PATTERN); - } - /** * Instantiate a weather source utilising a connection to a couchbase instance obtained via the * connector * * @param connector Connector, that establishes the connection to the couchbase instance - * @param coordinateSource Source to obtain actual coordinates from + * @param idCoordinateSource Source to obtain actual coordinates from * @param coordinateIdColumnName Name of the column containing the information about the * coordinate identifier * @param keyPrefix Prefix of entries, that belong to weather @@ -145,16 +84,15 @@ public CouchbaseWeatherSource( */ public CouchbaseWeatherSource( CouchbaseConnector connector, - IdCoordinateSource coordinateSource, + IdCoordinateSource idCoordinateSource, String coordinateIdColumnName, String keyPrefix, TimeBasedWeatherValueFactory weatherFactory, String timeStampPattern) { + super(idCoordinateSource, weatherFactory); this.connector = connector; - this.coordinateSource = coordinateSource; this.coordinateIdColumnName = 
coordinateIdColumnName; this.keyPrefix = keyPrefix; - this.weatherFactory = weatherFactory; this.timeStampPattern = timeStampPattern; } @@ -164,7 +102,7 @@ public Map> getWeather( logger.warn( "By not providing coordinates you are forcing couchbase to check all possible coordinates one by one." + " This is not very performant. Please consider providing specific coordinates instead."); - return getWeather(timeInterval, coordinateSource.getAllCoordinates()); + return getWeather(timeInterval, idCoordinateSource.getAllCoordinates()); } @Override @@ -172,7 +110,7 @@ public Map> getWeather( ClosedInterval timeInterval, Collection coordinates) { HashMap> coordinateToTimeSeries = new HashMap<>(); for (Point coordinate : coordinates) { - Optional coordinateId = coordinateSource.getId(coordinate); + Optional coordinateId = idCoordinateSource.getId(coordinate); if (coordinateId.isPresent()) { String query = createQueryStringForIntervalAndCoordinate(timeInterval, coordinateId.get()); CompletableFuture futureResult = connector.query(query); @@ -200,7 +138,7 @@ public Map> getWeather( @Override public Optional> getWeather(ZonedDateTime date, Point coordinate) { - Optional coordinateId = coordinateSource.getId(coordinate); + Optional coordinateId = idCoordinateSource.getId(coordinate); if (coordinateId.isEmpty()) { logger.warn("Unable to match coordinate {} to a coordinate ID", coordinate); return Optional.empty(); @@ -266,7 +204,7 @@ public String createQueryStringForIntervalAndCoordinate( private Optional toTimeBasedWeatherValueData(JsonObject jsonObj) { Integer coordinateId = jsonObj.getInt(coordinateIdColumnName); jsonObj.removeKey(coordinateIdColumnName); - Optional coordinate = coordinateSource.getCoordinate(coordinateId); + Optional coordinate = idCoordinateSource.getCoordinate(coordinateId); if (coordinate.isEmpty()) { logger.warn("Unable to match coordinate ID {} to a coordinate", coordinateId); return Optional.empty(); @@ -294,7 +232,6 @@ public Optional> 
toTimeBasedWeatherValue(JsonObject logger.debug("The following json could not be parsed:\n{}", jsonObj); return Optional.empty(); } - TimeBasedValue timeBasedValue = weatherFactory.get(data.get()).orElse(null); - return Optional.ofNullable(timeBasedValue); + return weatherFactory.get(data.get()).getData(); } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java index 63bf51b5f..e5aabaaa3 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java @@ -5,24 +5,18 @@ */ package edu.ie3.datamodel.io.source.csv; +import edu.ie3.datamodel.exceptions.ConnectorException; import edu.ie3.datamodel.exceptions.SourceException; import edu.ie3.datamodel.io.connectors.CsvFileConnector; -import edu.ie3.datamodel.io.factory.EntityFactory; -import edu.ie3.datamodel.io.factory.SimpleEntityData; -import edu.ie3.datamodel.io.factory.input.AssetInputEntityData; -import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; import edu.ie3.datamodel.io.naming.FileNamingStrategy; +import edu.ie3.datamodel.io.source.DataSource; import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.AssetInput; -import edu.ie3.datamodel.models.input.AssetTypeInput; -import edu.ie3.datamodel.models.input.NodeInput; -import edu.ie3.datamodel.models.input.OperatorInput; -import edu.ie3.datamodel.models.result.ResultEntity; import edu.ie3.datamodel.utils.validation.ValidationUtils; import edu.ie3.util.StringUtils; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.IOException; +import java.nio.file.Path; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; @@ -44,7 +38,7 @@ * @version 0.1 * @since 05.04.20 */ -public abstract class CsvDataSource { +public class CsvDataSource implements DataSource { 
protected static final Logger log = LoggerFactory.getLogger(CsvDataSource.class); @@ -52,14 +46,6 @@ public abstract class CsvDataSource { protected final String csvSep; protected final CsvFileConnector connector; - // field names - protected static final String OPERATOR = "operator"; - protected static final String NODE_A = "nodeA"; - protected static final String NODE_B = "nodeB"; - protected static final String NODE = "node"; - protected static final String TYPE = "type"; - protected static final String FIELDS_TO_VALUES_MAP = "fieldsToValuesMap"; - /** * @deprecated ensures downward compatibility with old csv data format. Can be removed when * support for old csv format is removed. * @@ -67,11 +53,24 @@ public abstract class CsvDataSource { @Deprecated(since = "1.1.0", forRemoval = true) private boolean notYetLoggedWarning = true; - protected CsvDataSource(String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { + protected CsvDataSource(String csvSep, Path folderPath, FileNamingStrategy fileNamingStrategy) { this.csvSep = csvSep; this.connector = new CsvFileConnector(folderPath, fileNamingStrategy); } + @Override + public Stream> getSourceData(Class entityClass) { + return buildStreamWithFieldsToAttributesMap(entityClass, connector); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + public BufferedReader createReader(Path filePath) throws FileNotFoundException { + return connector.initReader(filePath); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + /** * Takes a row string of a .csv file and a string array of the csv file headline, tries to split * the csv row string based and zip it together with the headline. 
This method does not contain @@ -83,7 +82,7 @@ protected CsvDataSource(String csvSep, String folderPath, FileNamingStrategy fil * @return a map containing the mapping of (fieldName to fieldValue) or an empty map if an error * occurred */ - private Map buildFieldsToAttributes( + protected Map buildFieldsToAttributes( final String csvRow, final String[] headline) { TreeMap insensitiveFieldsToAttributes = @@ -171,7 +170,7 @@ protected String[] parseCsvRow(String csvRow, String csvSep) { * @deprecated only left for downward compatibility. Will be removed in a major release */ @Deprecated(since = "1.1.0", forRemoval = true) - private String[] oldFieldVals(String csvSep, String csvRow) { + protected String[] oldFieldVals(String csvSep, String csvRow) { /*geo json support*/ final String geoJsonRegex = "\\{.+?}}}"; @@ -228,43 +227,6 @@ private List extractMatchingStrings(String regexString, String csvRow) { return matchingList; } - /** - * Returns either the first instance of a {@link OperatorInput} in the provided collection of or - * {@link OperatorInput#NO_OPERATOR_ASSIGNED} - * - * @param operators the collections of {@link OperatorInput}s that should be searched in - * @param operatorUuid the operator uuid that is requested - * @return either the first found instancen of {@link OperatorInput} or {@link - * OperatorInput#NO_OPERATOR_ASSIGNED} - */ - private OperatorInput getFirstOrDefaultOperator( - Collection operators, - String operatorUuid, - String entityClassName, - String requestEntityUuid) { - if (operatorUuid == null) { - log.warn( - "Input file for class '{}' is missing the 'operator' field. " - + "This is okay, but you should consider fixing the file by adding the field. " - + "Defaulting to 'NO OPERATOR ASSIGNED'", - entityClassName); - return OperatorInput.NO_OPERATOR_ASSIGNED; - } else { - return operatorUuid.trim().isEmpty() - ? 
OperatorInput.NO_OPERATOR_ASSIGNED - : findFirstEntityByUuid(operatorUuid, operators) - .orElseGet( - () -> { - log.debug( - "Cannot find operator with uuid '{}' for element '{}' and uuid '{}'. Defaulting to 'NO OPERATOR ASSIGNED'.", - operatorUuid, - entityClassName, - requestEntityUuid); - return OperatorInput.NO_OPERATOR_ASSIGNED; - }); - } - } - /** * Returns a predicate that can be used to filter optionals of {@link UniqueEntity}s and keep * track on the number of elements that have been empty optionals. This filter let only pass @@ -293,53 +255,6 @@ protected Predicate> isPresentCollectIfNot( }; } - protected void printInvalidElementInformation( - Class entityClass, LongAdder noOfInvalidElements) { - - log.error( - "{} entities of type '{}' are missing required elements!", - noOfInvalidElements, - entityClass.getSimpleName()); - } - - protected String saveMapGet(Map map, String key, String mapName) { - return Optional.ofNullable(map.get(key)) - .orElse( - "Key '" - + key - + "' not found" - + (mapName.isEmpty() ? "!" : " in map '" + mapName + "'!")); - } - - protected void logSkippingWarning( - String entityDesc, String entityUuid, String entityId, String missingElementsString) { - - log.warn( - "Skipping '{}' with uuid '{}' and id '{}'. Not all required entities found or map is missing entity key!\nMissing elements:\n{}", - entityDesc, - entityUuid, - entityId, - missingElementsString); - } - - /** - * Returns an {@link Optional} of the first {@link UniqueEntity} element of this collection - * matching the provided UUID or an empty {@code Optional} if no matching entity can be found. 
- * - * @param entityUuid uuid of the entity that should be looked for - * @param entities collection of entities that should be - * @param type of the entity that will be returned, derived from the provided collection - * @return either an optional containing the first entity that has the provided uuid or an empty - * optional if no matching entity with the provided uuid can be found - */ - protected Optional findFirstEntityByUuid( - String entityUuid, Collection entities) { - return entities.stream() - .parallel() - .filter(uniqueEntity -> uniqueEntity.getUuid().toString().equalsIgnoreCase(entityUuid)) - .findFirst(); - } - /** * Tries to open a file reader from the connector based on the provided entity class and hands it * over for further processing. @@ -354,7 +269,7 @@ protected Stream> buildStreamWithFieldsToAttributesMap( Class entityClass, CsvFileConnector connector) { try { return buildStreamWithFieldsToAttributesMap(entityClass, connector.initReader(entityClass)); - } catch (FileNotFoundException e) { + } catch (FileNotFoundException | ConnectorException e) { log.warn( "Unable to find file for entity '{}': {}", entityClass.getSimpleName(), e.getMessage()); } @@ -458,9 +373,9 @@ protected Set> distinctRowsWithLog( allRowsSet.stream().map(keyExtractor).collect(Collectors.joining(",\n")); log.error( """ - '{}' entities with duplicated {} key, but different field values found! Please review the corresponding input file! - Affected primary keys: - {}""", + '{}' entities with duplicated {} key, but different field values found! Please review the corresponding input file! + Affected primary keys: + {}""", entityDescriptor, keyDescriptor, affectedCoordinateIds); @@ -470,162 +385,4 @@ protected Set> distinctRowsWithLog( return allRowsSet; } - - /** - * Checks if the requested type of an asset can be found in the provided collection of types based - * on the provided fields to values mapping. 
The provided fields to values mapping needs to have - * one and only one field with key {@link #TYPE} and a corresponding UUID value. If the type can - * be found in the provided collection based on the UUID it is returned wrapped in an optional. - * Otherwise an empty optional is returned and a warning is logged. - * - * @param types a collection of types that should be used for searching - * @param fieldsToAttributes the field name to value mapping incl. the key {@link #TYPE} - * @param skippedClassString debug string of the class that will be skipping - * @param the type of the resulting type instance - * @return either an optional containing the type or an empty optional if the type cannot be found - */ - protected Optional getAssetType( - Collection types, Map fieldsToAttributes, String skippedClassString) { - - Optional assetType = - Optional.ofNullable(fieldsToAttributes.get(TYPE)) - .flatMap(typeUuid -> findFirstEntityByUuid(typeUuid, types)); - - // if the type is not present we return an empty element and - // log a warning - if (assetType.isEmpty()) { - logSkippingWarning( - skippedClassString, - saveMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), - saveMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), - TYPE + ": " + saveMapGet(fieldsToAttributes, TYPE, FIELDS_TO_VALUES_MAP)); - } - return assetType; - } - - /** - * Returns a stream of optional {@link AssetInputEntityData} that can be used to build instances - * of several subtypes of {@link UniqueEntity} by a corresponding {@link EntityFactory} that - * consumes this data. 
- * - * @param entityClass the entity class that should be build - * @param operators a collection of {@link OperatorInput} entities that should be used to build - * the data - * @param type of the entity that should be build - * @return stream of optionals of the entity data or empty optionals of the operator required for - * the data cannot be found - */ - protected Stream assetInputEntityDataStream( - Class entityClass, Collection operators) { - return buildStreamWithFieldsToAttributesMap(entityClass, connector) - .map( - fieldsToAttributes -> - assetInputEntityDataStream(entityClass, fieldsToAttributes, operators)); - } - - protected AssetInputEntityData assetInputEntityDataStream( - Class entityClass, - Map fieldsToAttributes, - Collection operators) { - - // get the operator of the entity - String operatorUuid = fieldsToAttributes.get(OPERATOR); - OperatorInput operator = - getFirstOrDefaultOperator( - operators, - operatorUuid, - entityClass.getSimpleName(), - saveMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP)); - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().removeAll(new HashSet<>(Collections.singletonList(OPERATOR))); - - return new AssetInputEntityData(fieldsToAttributes, entityClass, operator); - } - - /** - * Returns a stream of optional {@link NodeAssetInputEntityData} that can be used to build - * instances of several subtypes of {@link UniqueEntity} by a corresponding {@link EntityFactory} - * that consumes this data. 
param assetInputEntityDataStream - * - * @param assetInputEntityDataStream a stream consisting of {@link AssetInputEntityData} that is - * enriched with {@link NodeInput} data - * @param nodes a collection of {@link NodeInput} entities that should be used to build the data - * @return stream of optionals of the entity data or empty optionals of the node required for the - * data cannot be found - */ - protected Stream> nodeAssetInputEntityDataStream( - Stream assetInputEntityDataStream, Collection nodes) { - - return assetInputEntityDataStream - .parallel() - .map( - assetInputEntityData -> { - - // get the raw data - Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); - - // get the node of the entity - String nodeUuid = fieldsToAttributes.get(NODE); - Optional node = findFirstEntityByUuid(nodeUuid, nodes); - - // if the node is not present we return an empty element and - // log a warning - if (node.isEmpty()) { - logSkippingWarning( - assetInputEntityData.getTargetClass().getSimpleName(), - fieldsToAttributes.get("uuid"), - fieldsToAttributes.get("id"), - NODE + ": " + nodeUuid); - return Optional.empty(); - } - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().remove(NODE); - - return Optional.of( - new NodeAssetInputEntityData( - fieldsToAttributes, - assetInputEntityData.getTargetClass(), - assetInputEntityData.getOperatorInput(), - node.get())); - }); - } - - /** - * Returns a stream of optional entities that can be build by using {@link - * NodeAssetInputEntityData} and their corresponding factory. 
- * - * @param entityClass the entity class that should be build - * @param factory the factory that should be used for the building process - * @param nodes a collection of {@link NodeInput} entities that should be used to build the - * entities - * @param operators a collection of {@link OperatorInput} entities should be used to build the - * entities - * @param Type of the {@link AssetInput} to expect - * @return stream of optionals of the entities that has been built by the factor or empty - * optionals if the entity could not have been build - */ - protected Stream> nodeAssetEntityStream( - Class entityClass, - EntityFactory factory, - Collection nodes, - Collection operators) { - return nodeAssetInputEntityDataStream(assetInputEntityDataStream(entityClass, operators), nodes) - .map(dataOpt -> dataOpt.flatMap(factory::get)); - } - - /** - * Returns a stream of {@link SimpleEntityData} for result entity classes, using a - * fields-to-attributes map. - * - * @param entityClass the entity class that should be build - * @param Type of the {@link ResultEntity} to expect - * @return stream of {@link SimpleEntityData} - */ - protected Stream simpleEntityDataStream( - Class entityClass) { - return buildStreamWithFieldsToAttributesMap(entityClass, connector) - .map(fieldsToAttributes -> new SimpleEntityData(fieldsToAttributes, entityClass)); - } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvGraphicSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvGraphicSource.java deleted file mode 100644 index ec4eaa14e..000000000 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvGraphicSource.java +++ /dev/null @@ -1,238 +0,0 @@ -/* - * © 2021. 
TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.source.csv; - -import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputEntityData; -import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputFactory; -import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputEntityData; -import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputFactory; -import edu.ie3.datamodel.io.naming.FileNamingStrategy; -import edu.ie3.datamodel.io.source.GraphicSource; -import edu.ie3.datamodel.io.source.RawGridSource; -import edu.ie3.datamodel.io.source.TypeSource; -import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.NodeInput; -import edu.ie3.datamodel.models.input.OperatorInput; -import edu.ie3.datamodel.models.input.connector.LineInput; -import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; -import edu.ie3.datamodel.models.input.container.GraphicElements; -import edu.ie3.datamodel.models.input.graphics.LineGraphicInput; -import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.LongAdder; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * Implementation of the {@link GraphicSource} interface to read {@link NodeGraphicInput} and {@link - * LineGraphicInput} entities from .csv files - * - * @version 0.1 - * @since 08.04.20 - */ -public class CsvGraphicSource extends CsvDataSource implements GraphicSource { - - // general fields - private final TypeSource typeSource; - private final RawGridSource rawGridSource; - - // factories - private final LineGraphicInputFactory lineGraphicInputFactory; - private final NodeGraphicInputFactory nodeGraphicInputFactory; - - public 
CsvGraphicSource( - String csvSep, - String folderPath, - FileNamingStrategy fileNamingStrategy, - TypeSource typeSource, - RawGridSource rawGridSource) { - super(csvSep, folderPath, fileNamingStrategy); - this.typeSource = typeSource; - this.rawGridSource = rawGridSource; - - // init factories - this.lineGraphicInputFactory = new LineGraphicInputFactory(); - this.nodeGraphicInputFactory = new NodeGraphicInputFactory(); - } - - /** {@inheritDoc} */ - @Override - public Optional getGraphicElements() { - - // read all needed entities - /// start with types and operators - Set operators = typeSource.getOperators(); - Set lineTypes = typeSource.getLineTypes(); - - Set nodes = rawGridSource.getNodes(operators); - Set lines = rawGridSource.getLines(nodes, lineTypes, operators); - - // start with the entities needed for a GraphicElements entity - /// as we want to return a working grid, keep an eye on empty optionals - ConcurrentHashMap, LongAdder> nonBuildEntities = - new ConcurrentHashMap<>(); - - Set nodeGraphics = - buildNodeGraphicEntityData(nodes) - .map(dataOpt -> dataOpt.flatMap(nodeGraphicInputFactory::get)) - .filter(isPresentCollectIfNot(NodeGraphicInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - - Set lineGraphics = - buildLineGraphicEntityData(lines) - .map(dataOpt -> dataOpt.flatMap(lineGraphicInputFactory::get)) - .filter(isPresentCollectIfNot(LineGraphicInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - - // if we found invalid elements return an empty optional and log the problems - if (!nonBuildEntities.isEmpty()) { - nonBuildEntities.forEach(this::printInvalidElementInformation); - return Optional.empty(); - } - - // if everything is fine, return a GraphicElements instance - return Optional.of(new GraphicElements(nodeGraphics, lineGraphics)); - } - /** {@inheritDoc} */ - @Override - public Set getNodeGraphicInput() { - return 
getNodeGraphicInput(rawGridSource.getNodes(typeSource.getOperators())); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link - * NodeGraphicInput} entities or if an error during the building process occurs, all entities that - * has been able to be built are returned and the not-built ones are ignored (= filtered out). - */ - @Override - public Set getNodeGraphicInput(Set nodes) { - return buildNodeGraphicEntityData(nodes) - .map(dataOpt -> dataOpt.flatMap(nodeGraphicInputFactory::get)) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** {@inheritDoc} */ - @Override - public Set getLineGraphicInput() { - Set operators = typeSource.getOperators(); - return getLineGraphicInput( - rawGridSource.getLines( - rawGridSource.getNodes(operators), typeSource.getLineTypes(), operators)); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link LineInput} entities is not exhaustive for all available {@link - * LineGraphicInput} entities or if an error during the building process occurs, all entities that - * has been able to be built are returned and the not-built ones are ignored (= filtered out). - */ - @Override - public Set getLineGraphicInput(Set lines) { - return buildLineGraphicEntityData(lines) - .map(dataOpt -> dataOpt.flatMap(lineGraphicInputFactory::get)) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** - * Builds a stream of {@link NodeGraphicInputEntityData} instances that can be consumed by a - * {@link NodeGraphicInputFactory} to build instances of {@link NodeGraphicInput} entities. This - * method depends on corresponding instances of {@link NodeInput} entities that are represented by - * a corresponding {@link NodeGraphicInput} entity. The determination of matching {@link - * NodeInput} and {@link NodeGraphicInput} entities is carried out by the UUID of the {@link - * NodeInput} entity. Hence it is crucial to only pass over collections that are pre-checked for - * the uniqueness of the UUIDs of the nodes they contain. No further sanity checks are included in - * this method. If no UUID of a {@link NodeInput} entity can be found for a {@link - * NodeGraphicInputEntityData} instance, an empty optional is included in the stream and warning - * is logged. 
- * - * @param nodes a set of nodes with unique uuids - * @return a stream of optional {@link NodeGraphicInput} entities - */ - private Stream> buildNodeGraphicEntityData( - Set nodes) { - return buildStreamWithFieldsToAttributesMap(NodeGraphicInput.class, connector) - .map(fieldsToAttributes -> buildNodeGraphicEntityData(fieldsToAttributes, nodes)); - } - - private Optional buildNodeGraphicEntityData( - Map fieldsToAttributes, Set nodes) { - - // get the node of the entity - String nodeUuid = fieldsToAttributes.get(NODE); - Optional node = findFirstEntityByUuid(nodeUuid, nodes); - - // if the node is not present we return an empty element and - // log a warning - if (node.isEmpty()) { - logSkippingWarning( - NodeGraphicInput.class.getSimpleName(), - fieldsToAttributes.get("uuid"), - "no id (graphic entities don't have one)", - NODE + ": " + nodeUuid); - return Optional.empty(); - } - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().remove(NODE); - - return Optional.of(new NodeGraphicInputEntityData(fieldsToAttributes, node.get())); - } - - /** - * Builds a stream of {@link LineGraphicInputEntityData} instances that can be consumed by a - * {@link LineGraphicInputFactory} to build instances of {@link LineGraphicInput} entities. This - * method depends on corresponding instances of {@link LineInput} entities that are represented by - * a corresponding {@link LineGraphicInput} entity. The determination of matching {@link - * LineInput} and {@link LineGraphicInput} entities is carried out by the UUID of the {@link - * LineInput} entity. Hence it is crucial to only pass over collections that are pre-checked for - * the uniqueness of the UUIDs of the nodes they contain. No further sanity checks are included in - * this method. If no UUID of a {@link LineInput} entity can be found for a {@link - * LineGraphicInputEntityData} instance, an empty optional is included in the stream and warning - * is logged. 
- * - * @param lines a set of lines with unique uuids - * @return a stream of optional {@link LineGraphicInput} entities - */ - private Stream> buildLineGraphicEntityData( - Set lines) { - return buildStreamWithFieldsToAttributesMap(LineGraphicInput.class, connector) - .map(fieldsToAttributes -> buildLineGraphicEntityData(fieldsToAttributes, lines)); - } - - private Optional buildLineGraphicEntityData( - Map fieldsToAttributes, Set lines) { - - // get the node of the entity - String lineUuid = fieldsToAttributes.get("line"); - Optional line = findFirstEntityByUuid(lineUuid, lines); - - // if the node is not present we return an empty element and - // log a warning - if (line.isEmpty()) { - logSkippingWarning( - LineGraphicInput.class.getSimpleName(), - fieldsToAttributes.get("uuid"), - "no id (graphic entities don't have one)", - "line: " + lineUuid); - return Optional.empty(); - } - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().remove("line"); - - return Optional.of(new LineGraphicInputEntityData(fieldsToAttributes, line.get())); - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSource.java index 0b9748616..d7f293f0f 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSource.java @@ -5,40 +5,53 @@ */ package edu.ie3.datamodel.io.source.csv; +import edu.ie3.datamodel.exceptions.SourceException; import edu.ie3.datamodel.io.factory.SimpleFactoryData; import edu.ie3.datamodel.io.factory.timeseries.IdCoordinateFactory; -import edu.ie3.datamodel.io.naming.FileNamingStrategy; import edu.ie3.datamodel.io.source.IdCoordinateSource; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.util.geo.CoordinateDistance; +import edu.ie3.util.geo.GeoUtils; import java.io.BufferedReader; import java.io.IOException; import java.util.*; 
import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.measure.quantity.Length; import org.apache.commons.lang3.tuple.Pair; +import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Point; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import tech.units.indriya.ComparableQuantity; +import tech.units.indriya.quantity.Quantities; +import tech.units.indriya.unit.Units; /** * Implementation of {@link IdCoordinateSource} to read the mapping between coordinate id and actual * coordinate from csv file and build a mapping from it. */ -public class CsvIdCoordinateSource extends CsvDataSource implements IdCoordinateSource { - private final IdCoordinateFactory factory; +public class CsvIdCoordinateSource implements IdCoordinateSource { + + protected static final Logger log = LoggerFactory.getLogger(CsvIdCoordinateSource.class); + + private static final String COORDINATE_ID_MAPPING = "coordinate id mapping"; + /** Mapping in both ways (id -> coordinate) and (coordinate -> id) have to be unique */ private final Map idToCoordinate; private final Map coordinateToId; - public CsvIdCoordinateSource( - String csvSep, - String folderPath, - FileNamingStrategy fileNamingStrategy, - IdCoordinateFactory factory) { - super(csvSep, folderPath, fileNamingStrategy); + private final CsvDataSource dataSource; + private final IdCoordinateFactory factory; + public CsvIdCoordinateSource(IdCoordinateFactory factory, CsvDataSource dataSource) + throws SourceException { this.factory = factory; + this.dataSource = dataSource; - /* setup the coordinate id to lat/long mapping */ + /* set up the coordinate id to lat/long mapping */ idToCoordinate = setupIdToCoordinateMap(); coordinateToId = invert(idToCoordinate); } @@ -48,12 +61,15 @@ public CsvIdCoordinateSource( * * @return Mapping from coordinate id to coordinate */ - private Map setupIdToCoordinateMap() { - return buildStreamWithFieldsToAttributesMap() - 
.map(fieldToValues -> new SimpleFactoryData(fieldToValues, Pair.class)) - .map(factory::get) - .flatMap(Optional::stream) - .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + private Map setupIdToCoordinateMap() throws SourceException { + return Try.scanStream( + buildStreamWithFieldsToAttributesMap() + .map(fieldToValues -> new SimpleFactoryData(fieldToValues, Pair.class)) + .map(factory::get), + "Pair") + .transform( + s -> s.collect(Collectors.toMap(Pair::getKey, Pair::getValue)), SourceException::new) + .getOrThrow(); } /** @@ -93,6 +109,46 @@ public Collection getAllCoordinates() { return coordinateToId.keySet(); } + @Override + public List getNearestCoordinates(Point coordinate, int n) { + Set points = coordinateToId.keySet(); + + if (idToCoordinate.size() > n) { + ArrayList foundPoints = new ArrayList<>(); + ComparableQuantity distance = Quantities.getQuantity(10000, Units.METRE); + + // extends the search radius until n points are found + while (foundPoints.size() < n) { + foundPoints.clear(); + distance = distance.multiply(2); + + Envelope envelope = GeoUtils.calculateBoundingBox(coordinate, distance); + points.stream() + .filter(point -> envelope.contains(point.getCoordinate())) + .forEach(foundPoints::add); + } + + // replaces all point with smaller size of found points + points.clear(); + points.addAll(foundPoints); + } + + return calculateCoordinateDistances(coordinate, n, points); + } + + @Override + public List getClosestCoordinates( + Point coordinate, int n, ComparableQuantity distance) { + Set points = coordinateToId.keySet(); + + Envelope envelope = GeoUtils.calculateBoundingBox(coordinate, distance); + Set reducedPoints = + points.stream() + .filter(point -> envelope.contains(point.getCoordinate())) + .collect(Collectors.toSet()); + return calculateCoordinateDistances(coordinate, n, reducedPoints); + } + public int getCoordinateCount() { return idToCoordinate.keySet().size(); } @@ -103,31 +159,33 @@ public int getCoordinateCount() { * 
@return Stream with mappings from field identifiers to attributes */ protected Stream> buildStreamWithFieldsToAttributesMap() { - try (BufferedReader reader = connector.initIdCoordinateReader()) { - final String[] headline = parseCsvRow(reader.readLine(), csvSep); + try (BufferedReader reader = dataSource.connector.initIdCoordinateReader()) { + final String[] headline = dataSource.parseCsvRow(reader.readLine(), dataSource.csvSep); // by default try-with-resources closes the reader directly when we leave this method (which // is wanted to avoid a lock on the file), but this causes a closing of the stream as well. // As we still want to consume the data at other places, we start a new stream instead of // returning the original one - Collection> allRows = csvRowFieldValueMapping(reader, headline); + Collection> allRows = + dataSource.csvRowFieldValueMapping(reader, headline); Function, String> idExtractor = fieldToValues -> fieldToValues.get(factory.getIdField()); Set> withDistinctCoordinateId = - distinctRowsWithLog(allRows, idExtractor, "coordinate id mapping", "coordinate id"); + dataSource.distinctRowsWithLog( + allRows, idExtractor, COORDINATE_ID_MAPPING, "coordinate id"); Function, String> coordinateExtractor = fieldToValues -> fieldToValues .get(factory.getLatField()) .concat(fieldToValues.get(factory.getLonField())); - return distinctRowsWithLog( - withDistinctCoordinateId, coordinateExtractor, "coordinate id mapping", "coordinate") + return dataSource + .distinctRowsWithLog( + withDistinctCoordinateId, coordinateExtractor, COORDINATE_ID_MAPPING, "coordinate") .parallelStream(); } catch (IOException e) { log.error("Cannot read the file for coordinate id to coordinate mapping.", e); } - return Stream.empty(); } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvJointGridContainerSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvJointGridContainerSource.java index ba369f4ac..2fa9e0f2a 100644 --- 
a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvJointGridContainerSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvJointGridContainerSource.java @@ -5,52 +5,75 @@ */ package edu.ie3.datamodel.io.source.csv; +import edu.ie3.datamodel.exceptions.FileException; +import edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.naming.DefaultDirectoryHierarchy; +import edu.ie3.datamodel.io.naming.EntityPersistenceNamingStrategy; import edu.ie3.datamodel.io.naming.FileNamingStrategy; import edu.ie3.datamodel.io.source.*; import edu.ie3.datamodel.models.input.container.GraphicElements; import edu.ie3.datamodel.models.input.container.JointGridContainer; import edu.ie3.datamodel.models.input.container.RawGridElements; import edu.ie3.datamodel.models.input.container.SystemParticipants; +import edu.ie3.datamodel.utils.Try; +import java.nio.file.Path; +import java.util.List; /** Convenience class for cases where all used data comes from CSV sources */ public class CsvJointGridContainerSource { private CsvJointGridContainerSource() {} - public static JointGridContainer read(String gridName, String csvSep, String directoryPath) - throws SourceException { + public static JointGridContainer read( + String gridName, String csvSep, Path directoryPath, boolean isHierarchic) + throws SourceException, FileException, InvalidGridException { /* Parameterization */ + FileNamingStrategy namingStrategy; - FileNamingStrategy namingStrategy = new FileNamingStrategy(); // Default naming strategy + if (isHierarchic) { + // Hierarchic structure + DefaultDirectoryHierarchy fileHierarchy = + new DefaultDirectoryHierarchy(directoryPath, gridName); + namingStrategy = new FileNamingStrategy(new EntityPersistenceNamingStrategy(), fileHierarchy); + fileHierarchy.validate(); + } else { + // Flat structure + namingStrategy = new FileNamingStrategy(); + } + + CsvDataSource dataSource = new CsvDataSource(csvSep, 
directoryPath, namingStrategy); /* Instantiating sources */ - TypeSource typeSource = new CsvTypeSource(csvSep, directoryPath, namingStrategy); - RawGridSource rawGridSource = - new CsvRawGridSource(csvSep, directoryPath, namingStrategy, typeSource); - ThermalSource thermalSource = - new CsvThermalSource(csvSep, directoryPath, namingStrategy, typeSource); + TypeSource typeSource = new TypeSource(dataSource); + RawGridSource rawGridSource = new RawGridSource(typeSource, dataSource); + ThermalSource thermalSource = new ThermalSource(typeSource, dataSource); SystemParticipantSource systemParticipantSource = - new CsvSystemParticipantSource( - csvSep, directoryPath, namingStrategy, typeSource, thermalSource, rawGridSource); - GraphicSource graphicsSource = - new CsvGraphicSource(csvSep, directoryPath, namingStrategy, typeSource, rawGridSource); + new SystemParticipantSource(typeSource, thermalSource, rawGridSource, dataSource); + GraphicSource graphicSource = new GraphicSource(typeSource, rawGridSource, dataSource); /* Loading models */ - RawGridElements rawGridElements = - rawGridSource - .getGridData() - .orElseThrow(() -> new SourceException("Error during reading of raw grid data.")); - SystemParticipants systemParticipants = - systemParticipantSource - .getSystemParticipants() - .orElseThrow( - () -> new SourceException("Error during reading of system participant data.")); - GraphicElements graphicElements = - graphicsSource - .getGraphicElements() - .orElseThrow(() -> new SourceException("Error during reading of graphic elements.")); - - return new JointGridContainer(gridName, rawGridElements, systemParticipants, graphicElements); + Try rawGridElements = + Try.of(rawGridSource::getGridData, SourceException.class); + Try systemParticipants = + Try.of(systemParticipantSource::getSystemParticipants, SourceException.class); + Try graphicElements = + Try.of(graphicSource::getGraphicElements, SourceException.class); + + List exceptions = + 
Try.getExceptions(List.of(rawGridElements, systemParticipants, graphicElements)); + + if (!exceptions.isEmpty()) { + throw new SourceException( + exceptions.size() + " error(s) occurred while reading sources. ", exceptions); + } else { + // getOrThrow should not throw an exception in this context, because all exception are + // filtered and thrown before + return new JointGridContainer( + gridName, + rawGridElements.getOrThrow(), + systemParticipants.getOrThrow(), + graphicElements.getOrThrow()); + } } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvRawGridSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvRawGridSource.java deleted file mode 100644 index 83a50cdc7..000000000 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvRawGridSource.java +++ /dev/null @@ -1,547 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.source.csv; - -import edu.ie3.datamodel.io.factory.EntityFactory; -import edu.ie3.datamodel.io.factory.input.*; -import edu.ie3.datamodel.io.naming.FileNamingStrategy; -import edu.ie3.datamodel.io.source.RawGridSource; -import edu.ie3.datamodel.io.source.TypeSource; -import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.*; -import edu.ie3.datamodel.models.input.connector.*; -import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; -import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; -import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; -import edu.ie3.datamodel.models.input.container.RawGridElements; -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.LongAdder; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * Source that provides the capability to build entities that are hold by a 
{@link RawGridElements} - * as well as the {@link RawGridElements} container from .csv files. - * - *

This source is not buffered which means each call on a getter method always tries to - * read all data is necessary to return the requested objects in a hierarchical cascading way. - * - *

If performance is an issue, it is recommended to read the data cascading starting with reading - * nodes and then using the getters with arguments to avoid reading the same data multiple times. - * - *

The resulting sets are always unique on object and UUID base (with distinct UUIDs). - * - * @version 0.1 - * @since 03.04.20 - */ -public class CsvRawGridSource extends CsvDataSource implements RawGridSource { - - // general fields - private final TypeSource typeSource; - - // factories - private final NodeInputFactory nodeInputFactory; - private final LineInputFactory lineInputFactory; - private final Transformer2WInputFactory transformer2WInputFactory; - private final Transformer3WInputFactory transformer3WInputFactory; - private final SwitchInputFactory switchInputFactory; - private final MeasurementUnitInputFactory measurementUnitInputFactory; - - public CsvRawGridSource( - String csvSep, - String gridFolderPath, - FileNamingStrategy fileNamingStrategy, - TypeSource typeSource) { - super(csvSep, gridFolderPath, fileNamingStrategy); - this.typeSource = typeSource; - - // init factories - this.nodeInputFactory = new NodeInputFactory(); - this.lineInputFactory = new LineInputFactory(); - this.transformer2WInputFactory = new Transformer2WInputFactory(); - this.transformer3WInputFactory = new Transformer3WInputFactory(); - this.switchInputFactory = new SwitchInputFactory(); - this.measurementUnitInputFactory = new MeasurementUnitInputFactory(); - } - - /** {@inheritDoc} */ - @Override - public Optional getGridData() { - - /* read all needed entities start with the types and operators */ - Set operators = typeSource.getOperators(); - Set lineTypes = typeSource.getLineTypes(); - Set transformer2WTypeInputs = typeSource.getTransformer2WTypes(); - Set transformer3WTypeInputs = typeSource.getTransformer3WTypes(); - - /* assets */ - Set nodes = getNodes(operators); - - /* start with the entities needed for a RawGridElement as we want to return a working grid, keep an eye on empty - * optionals which is equal to elements that have been unable to be built e.g. 
due to missing elements they depend - * on - */ - ConcurrentHashMap, LongAdder> nonBuildEntities = - new ConcurrentHashMap<>(); - - Set lineInputs = - typedEntityStream(LineInput.class, lineInputFactory, nodes, operators, lineTypes) - .filter(isPresentCollectIfNot(LineInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set transformer2WInputs = - typedEntityStream( - Transformer2WInput.class, - transformer2WInputFactory, - nodes, - operators, - transformer2WTypeInputs) - .filter(isPresentCollectIfNot(Transformer2WInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set transformer3WInputs = - transformer3WEntityStream(nodes, transformer3WTypeInputs, operators) - .filter(isPresentCollectIfNot(Transformer3WInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set switches = - untypedConnectorInputEntityStream(SwitchInput.class, switchInputFactory, nodes, operators) - .filter(isPresentCollectIfNot(SwitchInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set measurementUnits = - nodeAssetEntityStream( - MeasurementUnitInput.class, measurementUnitInputFactory, nodes, operators) - .filter(isPresentCollectIfNot(MeasurementUnitInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - - /* if we found non-build elements return an empty optional and log the problems */ - if (!nonBuildEntities.isEmpty()) { - nonBuildEntities.forEach(this::printInvalidElementInformation); - return Optional.empty(); - } - - /* build the grid */ - RawGridElements gridElements = - new RawGridElements( - nodes, - lineInputs, - transformer2WInputs, - transformer3WInputs, - switches, - measurementUnits); - - /* return the grid if it is not empty */ - return gridElements.allEntitiesAsList().isEmpty() - ? 
Optional.empty() - : Optional.of(gridElements); - } - - /** {@inheritDoc} */ - @Override - public Set getNodes() { - return getNodes(typeSource.getOperators()); - } - - /** - * {@inheritDoc} - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getNodes(Set operators) { - return assetInputEntityDataStream(NodeInput.class, operators) - .map(nodeInputFactory::get) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** {@inheritDoc} */ - @Override - public Set getLines() { - Set operators = typeSource.getOperators(); - return getLines(getNodes(operators), typeSource.getLineTypes(), operators); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} or {@link LineTypeInput} entities is not exhaustive - * for all available {@link LineInput} entities (e.g. a {@link NodeInput} or {@link LineTypeInput} - * entity is missing) or if an error during the building process occurs, the entity that misses - * something will be skipped (which can be seen as a filtering functionality) but all entities - * that are able to be built will be returned anyway and the elements that couldn't have been - * built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getLines( - Set nodes, Set lineTypeInputs, Set operators) { - return typedEntityStream(LineInput.class, lineInputFactory, nodes, operators, lineTypeInputs) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** {@inheritDoc} */ - @Override - public Set get2WTransformers() { - Set operators = typeSource.getOperators(); - return get2WTransformers(getNodes(operators), typeSource.getTransformer2WTypes(), operators); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} or {@link Transformer2WTypeInput} entities is not - * exhaustive for all available {@link Transformer2WInput} entities (e.g. a {@link NodeInput} or - * {@link Transformer2WTypeInput} entity is missing) or if an error during the building process - * occurs, the entity that misses something will be skipped (which can be seen as a filtering - * functionality) but all entities that are able to be built will be returned anyway and the - * elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set get2WTransformers( - Set nodes, - Set transformer2WTypes, - Set operators) { - return typedEntityStream( - Transformer2WInput.class, - transformer2WInputFactory, - nodes, - operators, - transformer2WTypes) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** {@inheritDoc} */ - @Override - public Set get3WTransformers() { - Set operators = typeSource.getOperators(); - return get3WTransformers(getNodes(operators), typeSource.getTransformer3WTypes(), operators); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} or {@link Transformer3WTypeInput} entities is not - * exhaustive for all available {@link Transformer3WInput} entities (e.g. a {@link NodeInput} or - * {@link Transformer3WTypeInput} entity is missing) or if an error during the building process - * occurs, the entity that misses something will be skipped (which can be seen as a filtering - * functionality) but all entities that are able to be built will be returned anyway and the - * elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set get3WTransformers( - Set nodes, - Set transformer3WTypeInputs, - Set operators) { - return transformer3WEntityStream(nodes, transformer3WTypeInputs, operators) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - private Stream> transformer3WEntityStream( - Set nodes, - Set transformer3WTypeInputs, - Set operators) { - - return buildTransformer3WEntityData( - buildTypedConnectorEntityData( - buildUntypedConnectorInputEntityData( - assetInputEntityDataStream(Transformer3WInput.class, operators), nodes), - transformer3WTypeInputs), - nodes) - .map(dataOpt -> dataOpt.flatMap(transformer3WInputFactory::get)); - } - - /** {@inheritDoc} */ - @Override - public Set getSwitches() { - Set operators = typeSource.getOperators(); - return getSwitches(getNodes(operators), operators); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} entities is not exhaustive for all available {@link - * SwitchInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error during the - * building process occurs, the entity that misses something will be skipped (which can be seen as - * a filtering functionality) but all entities that are able to be built will be returned anyway - * and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getSwitches(Set nodes, Set operators) { - return untypedConnectorInputEntityStream( - SwitchInput.class, switchInputFactory, nodes, operators) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - private Stream> untypedConnectorInputEntityStream( - Class entityClass, - EntityFactory factory, - Set nodes, - Set operators) { - - return buildUntypedConnectorInputEntityData( - assetInputEntityDataStream(entityClass, operators), nodes) - .map(dataOpt -> dataOpt.flatMap(factory::get)); - } - - /** {@inheritDoc} */ - @Override - public Set getMeasurementUnits() { - Set operators = typeSource.getOperators(); - return getMeasurementUnits(getNodes(operators), operators); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} entities is not exhaustive for all available {@link - * MeasurementUnitInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error - * during the building process occurs, the entity that misses something will be skipped (which can - * be seen as a filtering functionality) but all entities that are able to be built will be - * returned anyway and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getMeasurementUnits( - Set nodes, Set operators) { - return nodeAssetEntityStream( - MeasurementUnitInput.class, measurementUnitInputFactory, nodes, operators) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - private - Stream> typedEntityStream( - Class entityClass, - EntityFactory> factory, - Collection nodes, - Collection operators, - Collection types) { - - return buildTypedConnectorEntityData( - buildUntypedConnectorInputEntityData( - assetInputEntityDataStream(entityClass, operators), nodes), - types) - .map(dataOpt -> dataOpt.flatMap(factory::get)); - } - - /** - * Converts a stream of {@link AssetInputEntityData} in connection with a collection of known - * {@link NodeInput}s to a stream of {@link ConnectorInputEntityData}. - * - * @param assetInputEntityDataStream Input stream of {@link AssetInputEntityData} - * @param nodes A collection of known nodes - * @return A stream on option to matching {@link ConnectorInputEntityData} - */ - private Stream> buildUntypedConnectorInputEntityData( - Stream assetInputEntityDataStream, Collection nodes) { - return assetInputEntityDataStream - .parallel() - .map( - assetInputEntityData -> - buildUntypedConnectorInputEntityData(assetInputEntityData, nodes)); - } - - /** - * Converts a single given {@link AssetInputEntityData} in connection with a collection of known - * {@link NodeInput}s to {@link ConnectorInputEntityData}. If this is not possible, an empty - * option is given back. 
- * - * @param assetInputEntityData Input entity data to convert - * @param nodes A collection of known nodes - * @return An option to matching {@link ConnectorInputEntityData} - */ - private Optional buildUntypedConnectorInputEntityData( - AssetInputEntityData assetInputEntityData, Collection nodes) { - // get the raw data - Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); - - // get the two connector nodes - String nodeAUuid = fieldsToAttributes.get(NODE_A); - String nodeBUuid = fieldsToAttributes.get(NODE_B); - Optional nodeA = findFirstEntityByUuid(nodeAUuid, nodes); - Optional nodeB = findFirstEntityByUuid(nodeBUuid, nodes); - - // if nodeA or nodeB are not present we return an empty element and log a - // warning - if (nodeA.isEmpty() || nodeB.isEmpty()) { - String debugString = - Stream.of( - new AbstractMap.SimpleEntry<>(nodeA, NODE_A + ": " + nodeAUuid), - new AbstractMap.SimpleEntry<>(nodeB, NODE_B + ": " + nodeBUuid)) - .filter(entry -> entry.getKey().isEmpty()) - .map(AbstractMap.SimpleEntry::getValue) - .collect(Collectors.joining("\n")); - - logSkippingWarning( - assetInputEntityData.getTargetClass().getSimpleName(), - fieldsToAttributes.get("uuid"), - fieldsToAttributes.get("id"), - debugString); - return Optional.empty(); - } - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().removeAll(new HashSet<>(Arrays.asList(NODE_A, NODE_B))); - - return Optional.of( - new ConnectorInputEntityData( - fieldsToAttributes, - assetInputEntityData.getTargetClass(), - assetInputEntityData.getOperatorInput(), - nodeA.get(), - nodeB.get())); - } - - /** - * Enriches the given untyped entity data with the equivalent asset type. 
If this is not possible, - * an empty Optional is returned - * - * @param noTypeConnectorEntityDataStream Stream of untyped entity data - * @param availableTypes Yet available asset types - * @param Type of the asset type - * @return Stream of option to enhanced data - */ - private - Stream>> buildTypedConnectorEntityData( - Stream> noTypeConnectorEntityDataStream, - Collection availableTypes) { - return noTypeConnectorEntityDataStream - .parallel() - .map( - noTypeEntityDataOpt -> - noTypeEntityDataOpt.flatMap( - noTypeEntityData -> findAndAddType(noTypeEntityData, availableTypes))); - } - - /** - * Finds the required asset type and if present, adds it to the untyped entity data - * - * @param untypedEntityData Untyped entity data to enrich - * @param availableTypes Yet available asset types - * @param Type of the asset type - * @return Option to enhanced data - */ - private Optional> findAndAddType( - ConnectorInputEntityData untypedEntityData, Collection availableTypes) { - Optional assetTypeOption = - getAssetType( - availableTypes, - untypedEntityData.getFieldsToValues(), - untypedEntityData.getClass().getSimpleName()); - return assetTypeOption.map(assetType -> addTypeToEntityData(untypedEntityData, assetType)); - } - - /** - * Enriches the given, untyped entity data with the provided asset type - * - * @param untypedEntityData Untyped entity data to enrich - * @param assetType Asset type to add - * @param Type of the asset type - * @return The enriched entity data - */ - private TypedConnectorInputEntityData addTypeToEntityData( - ConnectorInputEntityData untypedEntityData, T assetType) { - Map fieldsToAttributes = untypedEntityData.getFieldsToValues(); - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().remove(TYPE); - - // build result object - return new TypedConnectorInputEntityData<>( - fieldsToAttributes, - untypedEntityData.getTargetClass(), - untypedEntityData.getOperatorInput(), - untypedEntityData.getNodeA(), 
- untypedEntityData.getNodeB(), - assetType); - } - - /** - * Enriches the Stream of options on {@link Transformer3WInputEntityData} with the information of - * the internal node - * - * @param typedConnectorEntityDataStream Stream of already typed input entity data - * @param nodes Yet available nodes - * @return A stream of options on enriched data - */ - private Stream> buildTransformer3WEntityData( - Stream>> - typedConnectorEntityDataStream, - Collection nodes) { - return typedConnectorEntityDataStream - .parallel() - .map( - typedEntityDataOpt -> - typedEntityDataOpt.flatMap(typeEntityData -> addThirdNode(typeEntityData, nodes))); - } - - /** - * Enriches the third node to the already typed entity data of a three winding transformer. If no - * matching node can be found, return an empty Optional. - * - * @param typeEntityData Already typed entity data - * @param nodes Yet available nodes - * @return An option to the enriched data - */ - private Optional addThirdNode( - TypedConnectorInputEntityData typeEntityData, - Collection nodes) { - - // get the raw data - Map fieldsToAttributes = typeEntityData.getFieldsToValues(); - - // get nodeC of the transformer - String nodeCUuid = fieldsToAttributes.get("nodeC"); - Optional nodeC = findFirstEntityByUuid(nodeCUuid, nodes); - - // if nodeC is not present we return an empty element and - // log a warning - if (nodeC.isEmpty()) { - logSkippingWarning( - typeEntityData.getTargetClass().getSimpleName(), - fieldsToAttributes.get("uuid"), - fieldsToAttributes.get("id"), - "nodeC: " + nodeCUuid); - return Optional.empty(); - } - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().remove("nodeC"); - - return Optional.of( - new Transformer3WInputEntityData( - fieldsToAttributes, - typeEntityData.getTargetClass(), - typeEntityData.getOperatorInput(), - typeEntityData.getNodeA(), - typeEntityData.getNodeB(), - nodeC.get(), - typeEntityData.getType())); - } -} diff --git 
a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvResultEntitySource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvResultEntitySource.java deleted file mode 100644 index 1c6c44a9a..000000000 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvResultEntitySource.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.source.csv; - -import edu.ie3.datamodel.io.factory.SimpleEntityFactory; -import edu.ie3.datamodel.io.factory.result.*; -import edu.ie3.datamodel.io.naming.FileNamingStrategy; -import edu.ie3.datamodel.io.source.ResultEntitySource; -import edu.ie3.datamodel.models.result.NodeResult; -import edu.ie3.datamodel.models.result.ResultEntity; -import edu.ie3.datamodel.models.result.connector.LineResult; -import edu.ie3.datamodel.models.result.connector.SwitchResult; -import edu.ie3.datamodel.models.result.connector.Transformer2WResult; -import edu.ie3.datamodel.models.result.connector.Transformer3WResult; -import edu.ie3.datamodel.models.result.system.*; -import edu.ie3.datamodel.models.result.thermal.CylindricalStorageResult; -import edu.ie3.datamodel.models.result.thermal.ThermalHouseResult; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; - -/** - * Source that provides the capability to build entities of type {@link ResultEntity} container from - * .csv files. - * - *

This source is not buffered which means each call on a getter method always tries to - * read all data is necessary to return the requested objects in a hierarchical cascading way. - * - *

The resulting sets are always unique on object and UUID base (with distinct UUIDs). - * - * @version 0.1 - * @since 22 June 2021 - */ -public class CsvResultEntitySource extends CsvDataSource implements ResultEntitySource { - - private final SystemParticipantResultFactory systemParticipantResultFactory; - private final ThermalResultFactory thermalResultFactory; - private final SwitchResultFactory switchResultFactory; - private final NodeResultFactory nodeResultFactory; - private final ConnectorResultFactory connectorResultFactory; - private final FlexOptionsResultFactory flexOptionsResultFactory; - - public CsvResultEntitySource( - String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { - super(csvSep, folderPath, fileNamingStrategy); - - // init factories - this.systemParticipantResultFactory = new SystemParticipantResultFactory(); - this.thermalResultFactory = new ThermalResultFactory(); - this.switchResultFactory = new SwitchResultFactory(); - this.nodeResultFactory = new NodeResultFactory(); - this.connectorResultFactory = new ConnectorResultFactory(); - this.flexOptionsResultFactory = new FlexOptionsResultFactory(); - } - - public CsvResultEntitySource( - String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy, String dtfPattern) { - super(csvSep, folderPath, fileNamingStrategy); - - // init factories - this.systemParticipantResultFactory = new SystemParticipantResultFactory(dtfPattern); - this.thermalResultFactory = new ThermalResultFactory(dtfPattern); - this.switchResultFactory = new SwitchResultFactory(dtfPattern); - this.nodeResultFactory = new NodeResultFactory(dtfPattern); - this.connectorResultFactory = new ConnectorResultFactory(dtfPattern); - this.flexOptionsResultFactory = new FlexOptionsResultFactory(dtfPattern); - } - - // Grid - @Override - public Set getNodeResults() { - return getResultEntities(NodeResult.class, nodeResultFactory); - } - - @Override - public Set getSwitchResults() { - return 
getResultEntities(SwitchResult.class, switchResultFactory); - } - - @Override - public Set getLineResults() { - return getResultEntities(LineResult.class, connectorResultFactory); - } - - @Override - public Set getTransformer2WResultResults() { - return getResultEntities(Transformer2WResult.class, connectorResultFactory); - } - - @Override - public Set getTransformer3WResultResults() { - return getResultEntities(Transformer3WResult.class, connectorResultFactory); - } - - // System Participants - @Override - public Set getLoadResults() { - return getResultEntities(LoadResult.class, systemParticipantResultFactory); - } - - @Override - public Set getPvResults() { - return getResultEntities(PvResult.class, systemParticipantResultFactory); - } - - @Override - public Set getFixedFeedInResults() { - return getResultEntities(FixedFeedInResult.class, systemParticipantResultFactory); - } - - @Override - public Set getBmResults() { - return getResultEntities(BmResult.class, systemParticipantResultFactory); - } - - @Override - public Set getChpResults() { - return getResultEntities(ChpResult.class, systemParticipantResultFactory); - } - - @Override - public Set getWecResults() { - return getResultEntities(WecResult.class, systemParticipantResultFactory); - } - - @Override - public Set getStorageResults() { - return getResultEntities(StorageResult.class, systemParticipantResultFactory); - } - - @Override - public Set getEvcsResults() { - return getResultEntities(EvcsResult.class, systemParticipantResultFactory); - } - - @Override - public Set getEvResults() { - return getResultEntities(EvResult.class, systemParticipantResultFactory); - } - - @Override - public Set getHpResults() { - return getResultEntities(HpResult.class, systemParticipantResultFactory); - } - - @Override - public Set getThermalHouseResults() { - return getResultEntities(ThermalHouseResult.class, thermalResultFactory); - } - - @Override - public Set getCylindricalStorageResult() { - return 
getResultEntities(CylindricalStorageResult.class, thermalResultFactory); - } - - @Override - public Set getEmResults() { - return getResultEntities(EmResult.class, systemParticipantResultFactory); - } - - @Override - public Set getFlexOptionsResults() { - return getResultEntities(FlexOptionsResult.class, flexOptionsResultFactory); - } - - private Set getResultEntities( - Class entityClass, SimpleEntityFactory factory) { - return simpleEntityDataStream(entityClass) - .map( - entityData -> - factory.get(entityData).flatMap(loadResult -> cast(entityClass, loadResult))) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - private Optional cast( - Class entityClass, ResultEntity resultEntity) { - if (resultEntity.getClass().equals(entityClass)) { - // safe here as a) type is checked and b) csv data stream already filters non-fitting input - // data - return Optional.of(entityClass.cast(resultEntity)); - } else { - return Optional.empty(); - } - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSource.java deleted file mode 100644 index 58b0e5c71..000000000 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSource.java +++ /dev/null @@ -1,768 +0,0 @@ -/* - * © 2021. 
TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.source.csv; - -import edu.ie3.datamodel.io.factory.EntityFactory; -import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; -import edu.ie3.datamodel.io.factory.input.participant.*; -import edu.ie3.datamodel.io.naming.FileNamingStrategy; -import edu.ie3.datamodel.io.source.RawGridSource; -import edu.ie3.datamodel.io.source.SystemParticipantSource; -import edu.ie3.datamodel.io.source.ThermalSource; -import edu.ie3.datamodel.io.source.TypeSource; -import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.NodeInput; -import edu.ie3.datamodel.models.input.OperatorInput; -import edu.ie3.datamodel.models.input.container.SystemParticipants; -import edu.ie3.datamodel.models.input.system.*; -import edu.ie3.datamodel.models.input.system.type.*; -import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; -import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput; -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.LongAdder; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * Source that provides the capability to build entities of type {@link SystemParticipantInput} as - * well as {@link SystemParticipants} container from .csv files. - * - *

This source is not buffered which means each call on a getter method always tries to - * read all data is necessary to return the requested objects in a hierarchical cascading way. - * - *

If performance is an issue, it is recommended to read the data cascading starting with reading - * nodes and then using the getters with arguments to avoid reading the same data multiple times. - * - *

The resulting sets are always unique on object and UUID base (with distinct UUIDs). - * - * @version 0.1 - * @since 03.04.20 - */ -public class CsvSystemParticipantSource extends CsvDataSource implements SystemParticipantSource { - - private static final String THERMAL_STORAGE = "thermalstorage"; - private static final String THERMAL_BUS = "thermalbus"; - - // general fields - private final TypeSource typeSource; - private final RawGridSource rawGridSource; - private final ThermalSource thermalSource; - - // factories - private final BmInputFactory bmInputFactory; - private final ChpInputFactory chpInputFactory; - private final EvInputFactory evInputFactory; - private final FixedFeedInInputFactory fixedFeedInInputFactory; - private final HpInputFactory hpInputFactory; - private final LoadInputFactory loadInputFactory; - private final PvInputFactory pvInputFactory; - private final StorageInputFactory storageInputFactory; - private final WecInputFactory wecInputFactory; - private final EvcsInputFactory evcsInputFactory; - private final EmInputFactory emInputFactory; - - public CsvSystemParticipantSource( - String csvSep, - String participantsFolderPath, - FileNamingStrategy fileNamingStrategy, - TypeSource typeSource, - ThermalSource thermalSource, - RawGridSource rawGridSource) { - super(csvSep, participantsFolderPath, fileNamingStrategy); - this.typeSource = typeSource; - this.rawGridSource = rawGridSource; - this.thermalSource = thermalSource; - - // init factories - this.bmInputFactory = new BmInputFactory(); - this.chpInputFactory = new ChpInputFactory(); - this.evInputFactory = new EvInputFactory(); - this.fixedFeedInInputFactory = new FixedFeedInInputFactory(); - this.hpInputFactory = new HpInputFactory(); - this.loadInputFactory = new LoadInputFactory(); - this.pvInputFactory = new PvInputFactory(); - this.storageInputFactory = new StorageInputFactory(); - this.wecInputFactory = new WecInputFactory(); - this.evcsInputFactory = new EvcsInputFactory(); - 
this.emInputFactory = new EmInputFactory(); - } - - /** {@inheritDoc} */ - @Override - public Optional getSystemParticipants() { - - // read all needed entities - /// start with types and operators - Set operators = typeSource.getOperators(); - Set bmTypes = typeSource.getBmTypes(); - Set chpTypes = typeSource.getChpTypes(); - Set evTypes = typeSource.getEvTypes(); - Set hpTypes = typeSource.getHpTypes(); - Set storageTypes = typeSource.getStorageTypes(); - Set wecTypes = typeSource.getWecTypes(); - - /// go on with the thermal assets - Set thermalBuses = thermalSource.getThermalBuses(operators); - Set thermalStorages = - thermalSource.getThermalStorages(operators, thermalBuses); - - /// go on with the nodes - Set nodes = rawGridSource.getNodes(operators); - - // start with the entities needed for SystemParticipants container - /// as we want to return a working grid, keep an eye on empty optionals which is equal to - // elements that - /// have been unable to be built e.g. due to missing elements they depend on - ConcurrentHashMap, LongAdder> nonBuildEntities = - new ConcurrentHashMap<>(); - - Set fixedFeedInInputs = - nodeAssetEntityStream(FixedFeedInInput.class, fixedFeedInInputFactory, nodes, operators) - .filter(isPresentCollectIfNot(FixedFeedInInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set pvInputs = - nodeAssetEntityStream(PvInput.class, pvInputFactory, nodes, operators) - .filter(isPresentCollectIfNot(PvInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set loads = - nodeAssetEntityStream(LoadInput.class, loadInputFactory, nodes, operators) - .filter(isPresentCollectIfNot(LoadInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set bmInputs = - typedEntityStream(BmInput.class, bmInputFactory, nodes, operators, bmTypes) - .filter(isPresentCollectIfNot(BmInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set 
storages = - typedEntityStream(StorageInput.class, storageInputFactory, nodes, operators, storageTypes) - .filter(isPresentCollectIfNot(StorageInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set wecInputs = - typedEntityStream(WecInput.class, wecInputFactory, nodes, operators, wecTypes) - .filter(isPresentCollectIfNot(WecInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set evs = - typedEntityStream(EvInput.class, evInputFactory, nodes, operators, evTypes) - .filter(isPresentCollectIfNot(EvInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set evcs = - nodeAssetEntityStream(EvcsInput.class, evcsInputFactory, nodes, operators) - .filter(isPresentCollectIfNot(EvcsInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set chpInputs = - chpInputStream(nodes, operators, chpTypes, thermalBuses, thermalStorages) - .filter(isPresentCollectIfNot(ChpInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set hpInputs = - hpInputStream(nodes, operators, hpTypes, thermalBuses) - .filter(isPresentCollectIfNot(HpInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - Set emInputs = - nodeAssetEntityStream(EmInput.class, emInputFactory, nodes, operators) - .filter(isPresentCollectIfNot(EmInput.class, nonBuildEntities)) - .map(Optional::get) - .collect(Collectors.toSet()); - - // if we found invalid elements return an empty optional and log the problems - if (!nonBuildEntities.isEmpty()) { - nonBuildEntities.forEach(this::printInvalidElementInformation); - return Optional.empty(); - } - - // if everything is fine, return a system participants container - return Optional.of( - new SystemParticipants( - bmInputs, - chpInputs, - evcs, - evs, - fixedFeedInInputs, - hpInputs, - loads, - pvInputs, - storages, - wecInputs, - emInputs)); - } - - /** {@inheritDoc} */ - @Override - 
public Set getFixedFeedIns() { - Set operators = typeSource.getOperators(); - return getFixedFeedIns(rawGridSource.getNodes(operators), operators); - } - /** - * {@inheritDoc} - * - *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link - * FixedFeedInInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error during - * the building process occurs, the entity that misses something will be skipped (which can be - * seen as a filtering functionality), but all entities that are able to be built will be returned - * anyway and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getFixedFeedIns(Set nodes, Set operators) { - return nodeAssetEntityStream(FixedFeedInInput.class, fixedFeedInInputFactory, nodes, operators) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** {@inheritDoc} */ - @Override - public Set getPvPlants() { - Set operators = typeSource.getOperators(); - return getPvPlants(rawGridSource.getNodes(operators), operators); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link PvInput} - * entities (e.g. a {@link NodeInput} entity is missing) or if an error during the building - * process occurs, the entity that misses something will be skipped (which can be seen as a - * filtering functionality), but all entities that are able to be built will be returned anyway - * and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getPvPlants(Set nodes, Set operators) { - return nodeAssetEntityStream(PvInput.class, pvInputFactory, nodes, operators) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** {@inheritDoc} */ - @Override - public Set getLoads() { - Set operators = typeSource.getOperators(); - return getLoads(rawGridSource.getNodes(operators), operators); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link - * LoadInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error during the - * building process occurs, the entity that misses something will be skipped (which can be seen as - * a filtering functionality), but all entities that are able to be built will be returned anyway - * and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getLoads(Set nodes, Set operators) { - return nodeAssetEntityStream(LoadInput.class, loadInputFactory, nodes, operators) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - /** {@inheritDoc} */ - @Override - public Set getEvCS() { - Set operators = typeSource.getOperators(); - return getEvCS(rawGridSource.getNodes(operators), operators); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link - * EvcsInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error during the - * building process occurs, the entity that misses something will be skipped (which can be seen as - * a filtering functionality), but all entities that are able to be built will be returned anyway - * and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getEvCS(Set nodes, Set operators) { - return nodeAssetEntityStream(EvcsInput.class, evcsInputFactory, nodes, operators) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** {@inheritDoc} */ - @Override - public Set getBmPlants() { - Set operators = typeSource.getOperators(); - return getBmPlants(rawGridSource.getNodes(operators), operators, typeSource.getBmTypes()); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} or {@link BmTypeInput} entities is not exhaustive - * for all available {@link BmInput} entities (e.g. a {@link NodeInput} or {@link BmTypeInput} - * entity is missing) or if an error during the building process occurs, the entity that misses - * something will be skipped (which can be seen as a filtering functionality) but all entities - * that are able to be built will be returned anyway and the elements that couldn't have been - * built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getBmPlants( - Set nodes, Set operators, Set types) { - return typedEntityStream(BmInput.class, bmInputFactory, nodes, operators, types) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - /** {@inheritDoc} */ - @Override - public Set getStorages() { - Set operators = typeSource.getOperators(); - return getStorages(rawGridSource.getNodes(operators), operators, typeSource.getStorageTypes()); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} or {@link StorageTypeInput} entities is not - * exhaustive for all available {@link StorageInput} entities (e.g. a {@link NodeInput} or {@link - * StorageTypeInput} entity is missing) or if an error during the building process occurs, the - * entity that misses something will be skipped (which can be seen as a filtering functionality) - * but all entities that are able to be built will be returned anyway and the elements that - * couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getStorages( - Set nodes, Set operators, Set types) { - return typedEntityStream(StorageInput.class, storageInputFactory, nodes, operators, types) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - /** {@inheritDoc} */ - @Override - public Set getWecPlants() { - Set operators = typeSource.getOperators(); - return getWecPlants(rawGridSource.getNodes(operators), operators, typeSource.getWecTypes()); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} or {@link WecTypeInput} entities is not exhaustive - * for all available {@link WecInput} entities (e.g. a {@link NodeInput} or {@link WecTypeInput} - * entity is missing) or if an error during the building process occurs, the entity that misses - * something will be skipped (which can be seen as a filtering functionality) but all entities - * that are able to be built will be returned anyway and the elements that couldn't have been - * built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getWecPlants( - Set nodes, Set operators, Set types) { - return typedEntityStream(WecInput.class, wecInputFactory, nodes, operators, types) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - /** {@inheritDoc} */ - @Override - public Set getEvs() { - Set operators = typeSource.getOperators(); - return getEvs(rawGridSource.getNodes(operators), operators, typeSource.getEvTypes()); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput} or {@link EvTypeInput} entities is not exhaustive - * for all available {@link EvInput} entities (e.g. a {@link NodeInput} or {@link EvTypeInput} - * entity is missing) or if an error during the building process occurs, the entity that misses - * something will be skipped (which can be seen as a filtering functionality) but all entities - * that are able to be built will be returned anyway and the elements that couldn't have been - * built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getEvs( - Set nodes, Set operators, Set types) { - return typedEntityStream(EvInput.class, evInputFactory, nodes, operators, types) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** - * Constructs a stream of {@link SystemParticipantInput} entities wrapped in {@link Optional}s. - * - * @param entityClass the class of the entities that should be built - * @param factory the corresponding factory that is capable of building this entities - * @param nodes the nodes that should be considered for these entities - * @param operators the operators that should be considered for these entities - * @param types the types that should be considered for these entities - * @param the type of the resulting entity - * @param the type of the type model of the resulting entity - * @return a stream of optionals being either empty or holding an instance of a {@link - * SystemParticipantInput} of the requested entity class - */ - private - Stream> typedEntityStream( - Class entityClass, - EntityFactory> factory, - Set nodes, - Set operators, - Set types) { - return buildTypedEntityData( - nodeAssetInputEntityDataStream( - assetInputEntityDataStream(entityClass, operators), nodes), - types) - .map(dataOpt -> dataOpt.flatMap(factory::get)); - } - /** {@inheritDoc} */ - @Override - public Set getChpPlants() { - Set operators = typeSource.getOperators(); - Set thermalBuses = thermalSource.getThermalBuses(operators); - return getChpPlants( - rawGridSource.getNodes(operators), - operators, - typeSource.getChpTypes(), - thermalBuses, - thermalSource.getThermalStorages(operators, thermalBuses)); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput}, {@link ThermalBusInput}, {@link - * ThermalStorageInput} or {@link ChpTypeInput} entities is not exhaustive for all available - * {@link ChpInput} entities (e.g. a {@link NodeInput} or {@link ChpTypeInput} entity is missing) - * or if an error during the building process occurs, the entity that misses something will be - * skipped (which can be seen as a filtering functionality) but all entities that are able to be - * built will be returned anyway and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getChpPlants( - Set nodes, - Set operators, - Set types, - Set thermalBuses, - Set thermalStorages) { - - return chpInputStream(nodes, operators, types, thermalBuses, thermalStorages) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - private Stream> chpInputStream( - Set nodes, - Set operators, - Set types, - Set thermalBuses, - Set thermalStorages) { - return buildChpEntityData( - buildTypedEntityData( - nodeAssetInputEntityDataStream( - assetInputEntityDataStream(ChpInput.class, operators), nodes), - types), - thermalStorages, - thermalBuses) - .map(dataOpt -> dataOpt.flatMap(chpInputFactory::get)); - } - /** {@inheritDoc} */ - @Override - public Set getHeatPumps() { - Set operators = typeSource.getOperators(); - return getHeatPumps( - rawGridSource.getNodes(operators), - operators, - typeSource.getHpTypes(), - thermalSource.getThermalBuses()); - } - - /** - * {@inheritDoc} - * - *

If one of the sets of {@link NodeInput}, {@link ThermalBusInput} or {@link HpTypeInput} - * entities is not exhaustive for all available {@link HpInput} entities (e.g. a {@link NodeInput} - * or {@link HpTypeInput} entity is missing) or if an error during the building process occurs, - * the entity that misses something will be skipped (which can be seen as a filtering - * functionality) but all entities that are able to be built will be returned anyway and the - * elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getHeatPumps( - Set nodes, - Set operators, - Set types, - Set thermalBuses) { - return hpInputStream(nodes, operators, types, thermalBuses) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - private Stream> hpInputStream( - Set nodes, - Set operators, - Set types, - Set thermalBuses) { - return buildHpEntityData( - buildTypedEntityData( - nodeAssetInputEntityDataStream( - assetInputEntityDataStream(HpInput.class, operators), nodes), - types), - thermalBuses) - .map(dataOpt -> dataOpt.flatMap(hpInputFactory::get)); - } - - /** - * Enriches a given stream of {@link NodeAssetInputEntityData} optionals with a type of {@link - * SystemParticipantTypeInput} based on the provided collection of types and the fields to values - * mapping that inside the already provided {@link NodeAssetInputEntityData} instance. 
- * - * @param nodeAssetEntityDataStream the data stream of {@link NodeAssetInputEntityData} optionals - * @param types the types that should be used for enrichment and to build {@link - * SystemParticipantTypedEntityData} from - * @param the type of the provided entity types as well as the type parameter of the resulting - * {@link SystemParticipantTypedEntityData} - * @return a stream of optional {@link SystemParticipantTypedEntityData} instances or empty - * optionals if the type couldn't be found - */ - private - Stream>> buildTypedEntityData( - Stream> nodeAssetEntityDataStream, - Collection types) { - return nodeAssetEntityDataStream - .parallel() - .map( - nodeAssetInputEntityDataOpt -> - nodeAssetInputEntityDataOpt.flatMap( - nodeAssetInputEntityData -> - buildTypedEntityData(nodeAssetInputEntityData, types))); - } - - private - Optional> buildTypedEntityData( - NodeAssetInputEntityData nodeAssetInputEntityData, Collection types) { - return getAssetType( - types, - nodeAssetInputEntityData.getFieldsToValues(), - nodeAssetInputEntityData.getClass().getSimpleName()) - .map( - // if the optional is present, transform and return to the data, - // otherwise return an empty optional - assetType -> { - Map fieldsToAttributes = nodeAssetInputEntityData.getFieldsToValues(); - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().remove(TYPE); - - return new SystemParticipantTypedEntityData<>( - fieldsToAttributes, - nodeAssetInputEntityData.getTargetClass(), - nodeAssetInputEntityData.getOperatorInput(), - nodeAssetInputEntityData.getNode(), - assetType); - }); - } - - /** - * Enriches a given stream of {@link SystemParticipantTypedEntityData} optionals with a type of - * {@link ThermalBusInput} based on the provided collection of buses and the fields to values - * mapping inside the already provided {@link SystemParticipantTypedEntityData} instance. 
- * - * @param typedEntityDataStream the data stream of {@link SystemParticipantTypedEntityData} - * optionals - * @param thermalBuses the thermal buses that should be used for enrichment and to build {@link - * HpInputEntityData} - * @return stream of optional {@link HpInputEntityData} instances or empty optionals if they - * thermal bus couldn't be found - */ - private Stream> buildHpEntityData( - Stream>> typedEntityDataStream, - Collection thermalBuses) { - - return typedEntityDataStream - .parallel() - .map( - typedEntityDataOpt -> - typedEntityDataOpt.flatMap( - typedEntityData -> buildHpEntityData(typedEntityData, thermalBuses))); - } - - private Optional buildHpEntityData( - SystemParticipantTypedEntityData typedEntityData, - Collection thermalBuses) { - // get the raw data - Map fieldsToAttributes = typedEntityData.getFieldsToValues(); - - // get the thermal bus input for this chp unit and try to built the entity data - Optional hpInputEntityDataOpt = - Optional.ofNullable(fieldsToAttributes.get(THERMAL_BUS)) - .flatMap( - thermalBusUuid -> - thermalBuses.stream() - .filter( - storage -> - storage.getUuid().toString().equalsIgnoreCase(thermalBusUuid)) - .findFirst() - .map( - thermalBus -> { - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().remove(THERMAL_BUS); - - return new HpInputEntityData( - fieldsToAttributes, - typedEntityData.getOperatorInput(), - typedEntityData.getNode(), - typedEntityData.getTypeInput(), - thermalBus); - })); - - // if the requested entity is not present we return an empty element and - // log a warning - if (hpInputEntityDataOpt.isEmpty()) { - logSkippingWarning( - typedEntityData.getTargetClass().getSimpleName(), - saveMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), - saveMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), - "thermalBus: " + saveMapGet(fieldsToAttributes, THERMAL_BUS, FIELDS_TO_VALUES_MAP)); - } - - return hpInputEntityDataOpt; - } - - /** - * Enriches a 
given stream of {@link SystemParticipantTypedEntityData} optionals with a type of - * {@link ThermalBusInput} and {@link ThermalStorageInput} based on the provided collection of - * buses, storages and the fields to values mapping inside the already provided {@link - * SystemParticipantTypedEntityData} instance. - * - * @param typedEntityDataStream the data stream of {@link SystemParticipantTypedEntityData} - * optionals - * @param thermalStorages the thermal storages that should be used for enrichment and to build - * {@link ChpInputEntityData} - * @param thermalBuses the thermal buses that should be used for enrichment and to build {@link - * ChpInputEntityData} - * @return stream of optional {@link ChpInputEntityData}instances or empty optionals if they - * thermal bus couldn't be found - */ - private Stream> buildChpEntityData( - Stream>> typedEntityDataStream, - Collection thermalStorages, - Collection thermalBuses) { - - return typedEntityDataStream - .parallel() - .map( - typedEntityDataOpt -> - typedEntityDataOpt.flatMap( - typedEntityData -> - buildChpEntityData(typedEntityData, thermalStorages, thermalBuses))); - } - - private Optional buildChpEntityData( - SystemParticipantTypedEntityData typedEntityData, - Collection thermalStorages, - Collection thermalBuses) { - - // get the raw data - Map fieldsToAttributes = typedEntityData.getFieldsToValues(); - - // get the thermal storage input for this chp unit - Optional thermalStorage = - Optional.ofNullable(fieldsToAttributes.get(THERMAL_STORAGE)) - .flatMap( - thermalStorageUuid -> findFirstEntityByUuid(thermalStorageUuid, thermalStorages)); - - // get the thermal bus input for this chp unit - Optional thermalBus = - Optional.ofNullable(fieldsToAttributes.get("thermalBus")) - .flatMap(thermalBusUuid -> findFirstEntityByUuid(thermalBusUuid, thermalBuses)); - - // if the thermal storage or the thermal bus are not present we return an - // empty element and log a warning - if (!thermalStorage.isPresent() || 
!thermalBus.isPresent()) { - StringBuilder sB = new StringBuilder(); - if (!thermalStorage.isPresent()) { - sB.append("thermalStorage: ") - .append(saveMapGet(fieldsToAttributes, THERMAL_STORAGE, FIELDS_TO_VALUES_MAP)); - } - if (!thermalBus.isPresent()) { - sB.append("\nthermalBus: ") - .append(saveMapGet(fieldsToAttributes, THERMAL_BUS, FIELDS_TO_VALUES_MAP)); - } - - logSkippingWarning( - typedEntityData.getTargetClass().getSimpleName(), - saveMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), - saveMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), - sB.toString()); - - return Optional.empty(); - } - - // remove fields that are passed as objects to constructor - fieldsToAttributes - .keySet() - .removeAll(new HashSet<>(Arrays.asList("thermalBus", "thermalStorage"))); - - return Optional.of( - new ChpInputEntityData( - fieldsToAttributes, - typedEntityData.getOperatorInput(), - typedEntityData.getNode(), - typedEntityData.getTypeInput(), - thermalBus.get(), - thermalStorage.get())); - } - - @Override - public Set getEmSystems() { - Set operators = typeSource.getOperators(); - return getEmSystems(rawGridSource.getNodes(operators), operators); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link - * LoadInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error during the - * building process occurs, the entity that misses something will be skipped (which can be seen as - * a filtering functionality), but all entities that are able to be built will be returned anyway - * and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getEmSystems(Set nodes, Set operators) { - return nodeAssetEntityStream(EmInput.class, emInputFactory, nodes, operators) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvThermalSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvThermalSource.java deleted file mode 100644 index f3903ff69..000000000 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvThermalSource.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.source.csv; - -import edu.ie3.datamodel.io.factory.input.*; -import edu.ie3.datamodel.io.naming.FileNamingStrategy; -import edu.ie3.datamodel.io.source.ThermalSource; -import edu.ie3.datamodel.io.source.TypeSource; -import edu.ie3.datamodel.models.input.OperatorInput; -import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput; -import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; -import edu.ie3.datamodel.models.input.thermal.ThermalHouseInput; -import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput; -import java.util.*; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * Source that provides the capability to build thermal {@link - * edu.ie3.datamodel.models.input.AssetInput} entities from .csv files - * - *

This source is not buffered which means each call on a getter method always tries to - * read all data is necessary to return the requested objects in a hierarchical cascading way. - * - *

If performance is an issue, it is recommended to read the data cascading starting with reading - * nodes and then using the getters with arguments to avoid reading the same data multiple times. - * - *

The resulting sets are always unique on object and UUID base (with distinct UUIDs). - * - * @version 0.1 - * @since 03.04.20 - */ -public class CsvThermalSource extends CsvDataSource implements ThermalSource { - - // general fields - private final TypeSource typeSource; - - // factories - private final ThermalBusInputFactory thermalBusInputFactory; - private final CylindricalStorageInputFactory cylindricalStorageInputFactory; - private final ThermalHouseInputFactory thermalHouseInputFactory; - - public CsvThermalSource( - String csvSep, - String thermalUnitsFolderPath, - FileNamingStrategy fileNamingStrategy, - TypeSource typeSource) { - super(csvSep, thermalUnitsFolderPath, fileNamingStrategy); - this.typeSource = typeSource; - - // init factories - this.thermalBusInputFactory = new ThermalBusInputFactory(); - this.cylindricalStorageInputFactory = new CylindricalStorageInputFactory(); - this.thermalHouseInputFactory = new ThermalHouseInputFactory(); - } - /** {@inheritDoc} */ - @Override - public Set getThermalBuses() { - return assetInputEntityDataStream(ThermalBusInput.class, typeSource.getOperators()) - .map(thermalBusInputFactory::get) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - - /** - * {@inheritDoc} - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getThermalBuses(Set operators) { - return assetInputEntityDataStream(ThermalBusInput.class, operators) - .map(thermalBusInputFactory::get) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } - /** {@inheritDoc} */ - @Override - public Set getThermalStorages() { - return new HashSet<>(getCylindricStorages()); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link ThermalBusInput} entities is not exhaustive for all available {@link - * ThermalStorageInput} entities (e.g. a {@link ThermalBusInput} entity is missing) or if an error - * during the building process occurs, the entity that misses something will be skipped (which can - * be seen as a filtering functionality) but all entities that are able to be built will be - * returned anyway and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getThermalStorages( - Set operators, Set thermalBuses) { - return new HashSet<>(getCylindricStorages(operators, thermalBuses)); - } - /** {@inheritDoc} */ - @Override - public Set getThermalHouses() { - return assetInputEntityDataStream(ThermalHouseInput.class, typeSource.getOperators()) - .flatMap( - assetInputEntityData -> - buildThermalUnitInputEntityData(assetInputEntityData, getThermalBuses()) - .map(dataOpt -> dataOpt.flatMap(thermalHouseInputFactory::get)) - .flatMap(Optional::stream)) - .collect(Collectors.toSet()); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link ThermalBusInput} entities is not exhaustive for all available {@link - * ThermalHouseInput} entities (e.g. a {@link ThermalBusInput} entity is missing) or if an error - * during the building process occurs, the entity that misses something will be skipped (which can - * be seen as a filtering functionality) but all entities that are able to be built will be - * returned anyway and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getThermalHouses( - Set operators, Set thermalBuses) { - - return assetInputEntityDataStream(ThermalHouseInput.class, operators) - .map( - assetInputEntityData -> - buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses) - .map(dataOpt -> dataOpt.flatMap(thermalHouseInputFactory::get))) - .flatMap(elements -> elements.flatMap(Optional::stream)) - .collect(Collectors.toSet()); - } - /** {@inheritDoc} */ - @Override - public Set getCylindricStorages() { - - return assetInputEntityDataStream(CylindricalStorageInput.class, typeSource.getOperators()) - .map( - assetInputEntityData -> - buildThermalUnitInputEntityData(assetInputEntityData, getThermalBuses()) - .map(dataOpt -> dataOpt.flatMap(cylindricalStorageInputFactory::get))) - .flatMap(elements -> elements.flatMap(Optional::stream)) - .collect(Collectors.toSet()); - } - - /** - * {@inheritDoc} - * - *

If the set of {@link ThermalBusInput} entities is not exhaustive for all available {@link - * CylindricalStorageInput} entities (e.g. a {@link ThermalBusInput} entity is missing) or if an - * error during the building process occurs, the entity that misses something will be skipped - * (which can be seen as a filtering functionality) but all entities that are able to be built - * will be returned anyway and the elements that couldn't have been built are logged. - * - *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set - * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} - */ - @Override - public Set getCylindricStorages( - Set operators, Set thermalBuses) { - - return assetInputEntityDataStream(CylindricalStorageInput.class, operators) - .map( - assetInputEntityData -> - buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses) - .map(dataOpt -> dataOpt.flatMap(cylindricalStorageInputFactory::get))) - .flatMap(elements -> elements.flatMap(Optional::stream)) - .collect(Collectors.toSet()); - } - - private Stream> buildThermalUnitInputEntityData( - AssetInputEntityData assetInputEntityData, Collection thermalBuses) { - - // get the raw data - Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); - - // get the thermal bus input for this chp unit - String thermalBusUuid = fieldsToAttributes.get("thermalbus"); - Optional thermalBus = - thermalBuses.stream() - .filter(storage -> storage.getUuid().toString().equalsIgnoreCase(thermalBusUuid)) - .findFirst(); - - // remove fields that are passed as objects to constructor - fieldsToAttributes.keySet().removeAll(new HashSet<>(Collections.singletonList("thermalbus"))); - - // if the type is not present we return an empty element and - // log a warning - if (thermalBus.isEmpty()) { - logSkippingWarning( - assetInputEntityData.getTargetClass().getSimpleName(), - fieldsToAttributes.get("uuid"), - fieldsToAttributes.get("id"), - "thermalBus: " + thermalBusUuid); - return Stream.of(Optional.empty()); - } - - return Stream.of( - Optional.of( - new ThermalUnitInputEntityData( - assetInputEntityData.getFieldsToValues(), - assetInputEntityData.getTargetClass(), - assetInputEntityData.getOperatorInput(), - thermalBus.get()))); - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java index cb3fad7a1..c6affdb71 100644 --- 
a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java @@ -5,51 +5,24 @@ */ package edu.ie3.datamodel.io.source.csv; -import edu.ie3.datamodel.io.factory.SimpleEntityData; -import edu.ie3.datamodel.io.factory.timeseries.TimeSeriesMappingFactory; import edu.ie3.datamodel.io.naming.FileNamingStrategy; import edu.ie3.datamodel.io.source.TimeSeriesMappingSource; +import java.nio.file.Path; import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.stream.Collectors; +import java.util.stream.Stream; -public class CsvTimeSeriesMappingSource extends CsvDataSource implements TimeSeriesMappingSource { - /* Available factories */ - private static final TimeSeriesMappingFactory mappingFactory = new TimeSeriesMappingFactory(); +public class CsvTimeSeriesMappingSource extends TimeSeriesMappingSource { - private final Map mapping; + private final CsvDataSource dataSource; public CsvTimeSeriesMappingSource( - String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { - super(csvSep, folderPath, fileNamingStrategy); - - /* Build the map */ - mapping = - buildStreamWithFieldsToAttributesMap(MappingEntry.class, connector) - .map( - fieldToValues -> { - SimpleEntityData entityData = - new SimpleEntityData(fieldToValues, MappingEntry.class); - return mappingFactory.get(entityData); - }) - .flatMap(Optional::stream) - .collect(Collectors.toMap(MappingEntry::getParticipant, MappingEntry::getTimeSeries)); - } - - @Override - public Map getMapping() { - return mapping; + String csvSep, Path gridFolderPath, FileNamingStrategy fileNamingStrategy) { + this.dataSource = new CsvDataSource(csvSep, gridFolderPath, fileNamingStrategy); } - /** - * @deprecated since 3.0. 
Use {@link - * CsvTimeSeriesMetaInformationSource#getTimeSeriesMetaInformation()} instead - */ @Override - @Deprecated(since = "3.0", forRemoval = true) - public Optional - getTimeSeriesMetaInformation(UUID timeSeriesUuid) { - return connector.getIndividualTimeSeriesMetaInformation(timeSeriesUuid); + public Stream> getMappingSourceData() { + return dataSource.buildStreamWithFieldsToAttributesMap( + MappingEntry.class, dataSource.connector); } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java index 11926a7f4..d657d30cb 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.source.csv; +import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation; import edu.ie3.datamodel.io.naming.FileNamingStrategy; import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; import edu.ie3.datamodel.io.source.TimeSeriesMetaInformationSource; import edu.ie3.datamodel.utils.TimeSeriesUtils; +import java.nio.file.Path; import java.util.Map; import java.util.Optional; import java.util.UUID; @@ -19,11 +21,11 @@ * CSV implementation for retrieving {@link TimeSeriesMetaInformationSource} from input directory * structures */ -public class CsvTimeSeriesMetaInformationSource extends CsvDataSource - implements TimeSeriesMetaInformationSource { +public class CsvTimeSeriesMetaInformationSource implements TimeSeriesMetaInformationSource { - private final Map - timeSeriesMetaInformation; + protected final CsvDataSource dataSource; + + private final Map timeSeriesMetaInformation; /** * Creates a time series type source @@ -33,12 +35,11 @@ public class CsvTimeSeriesMetaInformationSource 
extends CsvDataSource * @param fileNamingStrategy the file naming strategy */ public CsvTimeSeriesMetaInformationSource( - String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { - super(csvSep, folderPath, fileNamingStrategy); - + String csvSep, Path folderPath, FileNamingStrategy fileNamingStrategy) { + this.dataSource = new CsvDataSource(csvSep, folderPath, fileNamingStrategy); // retrieve only the desired time series - timeSeriesMetaInformation = - connector.getCsvIndividualTimeSeriesMetaInformation( + this.timeSeriesMetaInformation = + dataSource.connector.getCsvIndividualTimeSeriesMetaInformation( TimeSeriesUtils.getAcceptedColumnSchemes().toArray(new ColumnScheme[0])); } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSource.java index f380bd11a..4b8d55604 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSource.java @@ -5,6 +5,8 @@ */ package edu.ie3.datamodel.io.source.csv; +import edu.ie3.datamodel.exceptions.FactoryException; +import edu.ie3.datamodel.exceptions.FailureException; import edu.ie3.datamodel.exceptions.SourceException; import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation; import edu.ie3.datamodel.io.factory.timeseries.*; @@ -14,19 +16,21 @@ import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue; import edu.ie3.datamodel.models.value.*; import edu.ie3.datamodel.utils.TimeSeriesUtils; +import edu.ie3.datamodel.utils.Try; import edu.ie3.util.interval.ClosedInterval; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.IOException; +import java.nio.file.Path; import java.time.ZonedDateTime; import java.util.*; import java.util.function.Function; -import java.util.stream.Collectors; +import java.util.stream.Stream; /** Source that is capable of providing information around 
time series from csv files. */ -public class CsvTimeSeriesSource extends CsvDataSource - implements TimeSeriesSource { +public class CsvTimeSeriesSource extends TimeSeriesSource { private final IndividualTimeSeries timeSeries; + private final CsvDataSource dataSource; /** * Factory method to build a source from given meta information @@ -37,60 +41,10 @@ public class CsvTimeSeriesSource extends CsvDataSource * @param metaInformation The given meta information * @throws SourceException If the given meta information are not supported * @return The source - * @deprecated since 3.0. Use {@link CsvTimeSeriesSource#getSource(java.lang.String, - * java.lang.String, edu.ie3.datamodel.io.naming.FileNamingStrategy, - * edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation)} instead. */ - @Deprecated(since = "3.0", forRemoval = true) public static CsvTimeSeriesSource getSource( String csvSep, - String folderPath, - FileNamingStrategy fileNamingStrategy, - edu.ie3.datamodel.io.connectors.CsvFileConnector.CsvIndividualTimeSeriesMetaInformation - metaInformation) - throws SourceException { - if (!TimeSeriesSource.isSchemeAccepted(metaInformation.getColumnScheme())) - throw new SourceException( - "Unsupported column scheme '" + metaInformation.getColumnScheme() + "'."); - - Class valClass = metaInformation.getColumnScheme().getValueClass(); - - return create(csvSep, folderPath, fileNamingStrategy, metaInformation, valClass); - } - - /** @deprecated since 3.0 */ - @Deprecated(since = "3.0", forRemoval = true) - private static CsvTimeSeriesSource create( - String csvSep, - String folderPath, - FileNamingStrategy fileNamingStrategy, - edu.ie3.datamodel.io.connectors.CsvFileConnector.CsvIndividualTimeSeriesMetaInformation - metaInformation, - Class valClass) { - TimeBasedSimpleValueFactory valueFactory = new TimeBasedSimpleValueFactory<>(valClass); - return new CsvTimeSeriesSource<>( - csvSep, - folderPath, - fileNamingStrategy, - metaInformation.getUuid(), - 
metaInformation.getFullFilePath(), - valClass, - valueFactory); - } - - /** - * Factory method to build a source from given meta information - * - * @param csvSep the separator string for csv columns - * @param folderPath path to the folder holding the time series files - * @param fileNamingStrategy strategy for the file naming of time series files / data sinks - * @param metaInformation The given meta information - * @throws SourceException If the given meta information are not supported - * @return The source - */ - public static CsvTimeSeriesSource getSource( - String csvSep, - String folderPath, + Path folderPath, FileNamingStrategy fileNamingStrategy, CsvIndividualTimeSeriesMetaInformation metaInformation) throws SourceException { @@ -105,7 +59,7 @@ public static CsvTimeSeriesSource getSource( private static CsvTimeSeriesSource create( String csvSep, - String folderPath, + Path folderPath, FileNamingStrategy fileNamingStrategy, CsvIndividualTimeSeriesMetaInformation metaInformation, Class valClass) { @@ -133,21 +87,19 @@ private static CsvTimeSeriesSource create( */ public CsvTimeSeriesSource( String csvSep, - String folderPath, + Path folderPath, FileNamingStrategy fileNamingStrategy, UUID timeSeriesUuid, - String filePath, + Path filePath, Class valueClass, TimeBasedSimpleValueFactory factory) { - super(csvSep, folderPath, fileNamingStrategy); + super(valueClass, factory); + this.dataSource = new CsvDataSource(csvSep, folderPath, fileNamingStrategy); /* Read in the full time series */ try { this.timeSeries = - buildIndividualTimeSeries( - timeSeriesUuid, - filePath, - fieldToValue -> this.buildTimeBasedValue(fieldToValue, valueClass, factory)); + buildIndividualTimeSeries(timeSeriesUuid, filePath, this::createTimeBasedValue); } catch (SourceException e) { throw new IllegalArgumentException( "Unable to obtain time series with UUID '" @@ -172,6 +124,8 @@ public Optional getValue(ZonedDateTime time) { return timeSeries.getValue(time); } + // 
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + /** * Attempts to read a time series with given unique identifier and file path. Single entries are * obtained entries with the help of {@code fieldToValueFunction}. @@ -181,43 +135,28 @@ public Optional getValue(ZonedDateTime time) { * @param fieldToValueFunction function, that is able to transfer a mapping (from field to value) * onto a specific instance of the targeted entry class * @throws SourceException If the file cannot be read properly - * @return An option onto an individual time series + * @return an individual time series */ - private IndividualTimeSeries buildIndividualTimeSeries( + protected IndividualTimeSeries buildIndividualTimeSeries( UUID timeSeriesUuid, - String filePath, - Function, Optional>> fieldToValueFunction) + Path filePath, + Function, Try, FactoryException>> fieldToValueFunction) throws SourceException { - try (BufferedReader reader = connector.initReader(filePath)) { - Set> timeBasedValues = - buildStreamWithFieldsToAttributesMap(TimeBasedValue.class, reader) - .map(fieldToValueFunction) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - - return new IndividualTimeSeries<>(timeSeriesUuid, timeBasedValues); + try (BufferedReader reader = dataSource.connector.initReader(filePath)) { + Try>, FailureException> timeBasedValues = + Try.scanStream( + dataSource + .buildStreamWithFieldsToAttributesMap(TimeBasedValue.class, reader) + .map(fieldToValueFunction), + "TimeBasedValue"); + return new IndividualTimeSeries<>( + timeSeriesUuid, new HashSet<>(timeBasedValues.getOrThrow().toList())); } catch (FileNotFoundException e) { throw new SourceException("Unable to find a file with path '" + filePath + "'.", e); } catch (IOException e) { throw new SourceException("Error during reading of file'" + filePath + "'.", e); + } catch (FailureException e) { + throw new SourceException("Unable to build individual time series. 
", e.getCause()); } } - - /** - * Build a {@link TimeBasedValue} of type {@code V}, whereas the underlying {@link Value} does not - * need any additional information. - * - * @param fieldToValues Mapping from field id to values - * @param valueClass Class of the desired underlying value - * @param factory Factory to process the "flat" information - * @return Optional simple time based value - */ - private Optional> buildTimeBasedValue( - Map fieldToValues, - Class valueClass, - TimeBasedSimpleValueFactory factory) { - SimpleTimeBasedValueData factoryData = - new SimpleTimeBasedValueData<>(fieldToValues, valueClass); - return factory.get(factoryData); - } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTypeSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTypeSource.java deleted file mode 100644 index 16094eb47..000000000 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTypeSource.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.source.csv; - -import edu.ie3.datamodel.io.factory.EntityFactory; -import edu.ie3.datamodel.io.factory.SimpleEntityData; -import edu.ie3.datamodel.io.factory.input.OperatorInputFactory; -import edu.ie3.datamodel.io.factory.typeinput.LineTypeInputFactory; -import edu.ie3.datamodel.io.factory.typeinput.SystemParticipantTypeInputFactory; -import edu.ie3.datamodel.io.factory.typeinput.Transformer2WTypeInputFactory; -import edu.ie3.datamodel.io.factory.typeinput.Transformer3WTypeInputFactory; -import edu.ie3.datamodel.io.naming.FileNamingStrategy; -import edu.ie3.datamodel.io.source.TypeSource; -import edu.ie3.datamodel.models.input.InputEntity; -import edu.ie3.datamodel.models.input.OperatorInput; -import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; -import 
edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; -import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; -import edu.ie3.datamodel.models.input.system.type.*; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; - -/** - * Source that provides the capability to build entities of type {@link SystemParticipantTypeInput} - * and {@link OperatorInput} from .csv files - * - * @version 0.1 - * @since 05.04.20 - */ -public class CsvTypeSource extends CsvDataSource implements TypeSource { - - // factories - private final OperatorInputFactory operatorInputFactory; - private final Transformer2WTypeInputFactory transformer2WTypeInputFactory; - private final LineTypeInputFactory lineTypeInputFactory; - private final Transformer3WTypeInputFactory transformer3WTypeInputFactory; - private final SystemParticipantTypeInputFactory systemParticipantTypeInputFactory; - - public CsvTypeSource( - String csvSep, String typeFolderPath, FileNamingStrategy fileNamingStrategy) { - super(csvSep, typeFolderPath, fileNamingStrategy); - - // init factories - operatorInputFactory = new OperatorInputFactory(); - transformer2WTypeInputFactory = new Transformer2WTypeInputFactory(); - lineTypeInputFactory = new LineTypeInputFactory(); - transformer3WTypeInputFactory = new Transformer3WTypeInputFactory(); - systemParticipantTypeInputFactory = new SystemParticipantTypeInputFactory(); - } - /** {@inheritDoc} */ - @Override - public Set getTransformer2WTypes() { - return buildSimpleEntities(Transformer2WTypeInput.class, transformer2WTypeInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getOperators() { - return buildSimpleEntities(OperatorInput.class, operatorInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getLineTypes() { - return buildSimpleEntities(LineTypeInput.class, lineTypeInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getTransformer3WTypes() { - return 
buildSimpleEntities(Transformer3WTypeInput.class, transformer3WTypeInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getBmTypes() { - return buildSimpleEntities(BmTypeInput.class, systemParticipantTypeInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getChpTypes() { - return buildSimpleEntities(ChpTypeInput.class, systemParticipantTypeInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getHpTypes() { - return buildSimpleEntities(HpTypeInput.class, systemParticipantTypeInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getStorageTypes() { - return buildSimpleEntities(StorageTypeInput.class, systemParticipantTypeInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getWecTypes() { - return buildSimpleEntities(WecTypeInput.class, systemParticipantTypeInputFactory); - } - /** {@inheritDoc} */ - @Override - public Set getEvTypes() { - return buildSimpleEntities(EvTypeInput.class, systemParticipantTypeInputFactory); - } - - /** - * Tries to build a set of {@link InputEntity}s of the provided entity class based on the provided - * factory. To do so, first entity data of type {@link SimpleEntityData} is constructed based on - * the input .csv file that can be derived from the entity class. This data is than passed to the - * factory and used to build the corresponding entities. - * - *

Be careful, that always a factory that is able to produce an entity of type is passed - * into as argument. Otherwise, a casting exception will be thrown. - * - * @param entityClass the concrete class of the {@link InputEntity} that should be built - * @param factory the entity factory that should be used - * @param the type of the resulting entity - * @return a set containing all entities that could have been built or an empty set if no entity - * could been built - */ - private Set buildSimpleEntities( - Class entityClass, EntityFactory factory) { - return buildStreamWithFieldsToAttributesMap(entityClass, connector) - .map( - fieldsToAttributes -> { - SimpleEntityData data = new SimpleEntityData(fieldsToAttributes, entityClass); - return (Optional) factory.get(data); - }) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java index 66f69939b..1997edbd1 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java @@ -7,7 +7,6 @@ import edu.ie3.datamodel.io.connectors.CsvFileConnector; import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation; -import edu.ie3.datamodel.io.factory.timeseries.IdCoordinateFactory; import edu.ie3.datamodel.io.factory.timeseries.TimeBasedWeatherValueData; import edu.ie3.datamodel.io.factory.timeseries.TimeBasedWeatherValueFactory; import edu.ie3.datamodel.io.naming.FileNamingStrategy; @@ -24,6 +23,7 @@ import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.IOException; +import java.nio.file.Path; import java.time.ZonedDateTime; import java.util.*; import java.util.function.Function; @@ -32,37 +32,9 @@ import org.locationtech.jts.geom.Point; /** Implements a WeatherSource for CSV files by using the CsvTimeSeriesSource as a base */ -public 
class CsvWeatherSource extends CsvDataSource implements WeatherSource { +public class CsvWeatherSource extends WeatherSource { - private final TimeBasedWeatherValueFactory weatherFactory; - - private final Map> coordinateToTimeSeries; - private final IdCoordinateSource coordinateSource; - - /** - * Initializes a CsvWeatherSource with a {@link CsvIdCoordinateSource} instance and immediately - * imports weather data, which will be kept for the lifetime of this source - * - * @param csvSep the separator string for csv columns - * @param folderPath path to the folder holding the time series files - * @param fileNamingStrategy strategy for the file naming of time series files / data sinks - * @param weatherFactory factory to transfer field to value mapping into actual java object - * instances - * @param coordinateFactory factory to build coordinate id to coordinate mapping - */ - public CsvWeatherSource( - String csvSep, - String folderPath, - FileNamingStrategy fileNamingStrategy, - TimeBasedWeatherValueFactory weatherFactory, - IdCoordinateFactory coordinateFactory) { - this( - csvSep, - folderPath, - fileNamingStrategy, - new CsvIdCoordinateSource(csvSep, folderPath, fileNamingStrategy, coordinateFactory), - weatherFactory); - } + private final CsvDataSource dataSource; /** * Initializes a CsvWeatherSource and immediately imports weather data, which will be kept for the @@ -71,35 +43,22 @@ public CsvWeatherSource( * @param csvSep the separator string for csv columns * @param folderPath path to the folder holding the time series files * @param fileNamingStrategy strategy for the file naming of time series files / data sinks - * @param coordinateSource a coordinate source to map ids to points + * @param idCoordinateSource a coordinate source to map ids to points * @param weatherFactory factory to transfer field to value mapping into actual java object * instances */ public CsvWeatherSource( String csvSep, - String folderPath, + Path folderPath, FileNamingStrategy 
fileNamingStrategy, - IdCoordinateSource coordinateSource, + IdCoordinateSource idCoordinateSource, TimeBasedWeatherValueFactory weatherFactory) { - super(csvSep, folderPath, fileNamingStrategy); - this.coordinateSource = coordinateSource; - this.weatherFactory = weatherFactory; - + super(idCoordinateSource, weatherFactory); + this.dataSource = new CsvDataSource(csvSep, folderPath, fileNamingStrategy); coordinateToTimeSeries = getWeatherTimeSeries(); } - /** - * Creates reader for all available weather time series files and then continues to parse them - * - * @return a map of coordinates to their time series - */ - private Map> getWeatherTimeSeries() { - /* Get only weather time series meta information */ - Collection weatherCsvMetaInformation = - connector.getCsvIndividualTimeSeriesMetaInformation(ColumnScheme.WEATHER).values(); - - return readWeatherTimeSeries(Set.copyOf(weatherCsvMetaInformation), connector); - } + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- @Override public Map> getWeather( @@ -124,6 +83,8 @@ public Optional> getWeather(ZonedDateTime date, Poi return timeSeries.getTimeBasedValue(date); } + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + /** * Trims all time series in a map to the given time interval * @@ -143,6 +104,32 @@ private Map> trimMapToInterval( entry -> TimeSeriesUtils.trimTimeSeriesToInterval(entry.getValue(), timeInterval))); } + /** + * Merge two individual time series into a new time series with the UUID of the first parameter + * + * @param a the first time series to merge + * @param b the second time series to merge + * @return merged time series with a's UUID + */ + protected IndividualTimeSeries mergeTimeSeries( + IndividualTimeSeries a, IndividualTimeSeries b) { + SortedSet> entries = a.getEntries(); + entries.addAll(b.getEntries()); + return new IndividualTimeSeries<>(a.getUuid(), entries); + } 
+ + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + private Map> getWeatherTimeSeries() { + /* Get only weather time series meta information */ + Collection weatherCsvMetaInformation = + dataSource + .connector + .getCsvIndividualTimeSeriesMetaInformation(ColumnScheme.WEATHER) + .values(); + return readWeatherTimeSeries(Set.copyOf(weatherCsvMetaInformation), dataSource.connector); + } + /** * Reads weather data to time series and maps them coordinate wise * @@ -187,6 +174,36 @@ private Map> readWeatherTimeSeries( return weatherTimeSeries; } + private Stream> buildStreamWithFieldsToAttributesMap( + Class entityClass, BufferedReader bufferedReader) { + try (BufferedReader reader = bufferedReader) { + final String[] headline = dataSource.parseCsvRow(reader.readLine(), dataSource.csvSep); + + // by default try-with-resources closes the reader directly when we leave this method (which + // is wanted to avoid a lock on the file), but this causes a closing of the stream as well. + // As we still want to consume the data at other places, we start a new stream instead of + // returning the original one + Collection> allRows = + dataSource.csvRowFieldValueMapping(reader, headline); + + Function, String> timeCoordinateIdExtractor = + fieldToValues -> + fieldToValues + .get(weatherFactory.getTimeFieldString()) + .concat(fieldToValues.get(weatherFactory.getCoordinateIdFieldString())); + return dataSource + .distinctRowsWithLog( + allRows, timeCoordinateIdExtractor, entityClass.getSimpleName(), "UUID") + .parallelStream(); + + } catch (IOException e) { + log.warn( + "Cannot read file to build entity '{}': {}", entityClass.getSimpleName(), e.getMessage()); + } + + return Stream.empty(); + } + /** * Builds a {@link TimeBasedValue} of type {@link WeatherValue} from given "flat " input * information. If the single model cannot be built, an empty optional is handed back. 
@@ -207,7 +224,7 @@ private Optional> buildWeatherValue( /* Build factory data */ TimeBasedWeatherValueData factoryData = new TimeBasedWeatherValueData(fieldToValues, coordinate); - return weatherFactory.get(factoryData); + return weatherFactory.get(factoryData).getData(); }) .orElseGet( () -> { @@ -216,50 +233,6 @@ private Optional> buildWeatherValue( }); } - /** - * Reads the first line (considered to be the headline with headline fields) and returns a stream - * of (fieldName to fieldValue) mapping where each map represents one row of the .csv file. Since - * the returning stream is a parallel stream, the order of the elements cannot be guaranteed. - * - *

This method overrides {@link CsvDataSource#buildStreamWithFieldsToAttributesMap(Class, - * BufferedReader)} to not do sanity check for available UUID. This is because the weather source - * might make use of ICON weather data, which don't have a UUID. For weather it is indeed not - * necessary, to have one unique UUID. - * - * @param entityClass the entity class that should be build - * @param bufferedReader the reader to use - * @return a parallel stream of maps, where each map represents one row of the csv file with the - * mapping (fieldName to fieldValue) - */ - @Override - protected Stream> buildStreamWithFieldsToAttributesMap( - Class entityClass, BufferedReader bufferedReader) { - try (BufferedReader reader = bufferedReader) { - final String[] headline = parseCsvRow(reader.readLine(), csvSep); - - // by default try-with-resources closes the reader directly when we leave this method (which - // is wanted to avoid a lock on the file), but this causes a closing of the stream as well. - // As we still want to consume the data at other places, we start a new stream instead of - // returning the original one - Collection> allRows = csvRowFieldValueMapping(reader, headline); - - Function, String> timeCoordinateIdExtractor = - fieldToValues -> - fieldToValues - .get(weatherFactory.getTimeFieldString()) - .concat(fieldToValues.get(weatherFactory.getCoordinateIdFieldString())); - return distinctRowsWithLog( - allRows, timeCoordinateIdExtractor, entityClass.getSimpleName(), "UUID") - .parallelStream(); - - } catch (IOException e) { - log.warn( - "Cannot read file to build entity '{}': {}", entityClass.getSimpleName(), e.getMessage()); - } - - return Stream.empty(); - } - /** * Extract the coordinate identifier from the field to value mapping and obtain the actual * coordinate in collaboration with the source. 
@@ -277,20 +250,6 @@ private Optional extractCoordinate(Map fieldToValues) { return Optional.empty(); } int coordinateId = Integer.parseInt(coordinateString); - return coordinateSource.getCoordinate(coordinateId); - } - - /** - * Merge two individual time series into a new time series with the UUID of the first parameter - * - * @param a the first time series to merge - * @param b the second time series to merge - * @return merged time series with a's UUID - */ - private IndividualTimeSeries mergeTimeSeries( - IndividualTimeSeries a, IndividualTimeSeries b) { - SortedSet> entries = a.getEntries(); - entries.addAll(b.getEntries()); - return new IndividualTimeSeries<>(a.getUuid(), entries); + return idCoordinateSource.getCoordinate(coordinateId); } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSource.java b/src/main/java/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSource.java index 0e3d182b8..de3269b61 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSource.java @@ -13,6 +13,7 @@ import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue; import edu.ie3.datamodel.models.value.WeatherValue; +import edu.ie3.datamodel.utils.Try; import edu.ie3.util.StringUtils; import edu.ie3.util.interval.ClosedInterval; import java.time.ZonedDateTime; @@ -25,7 +26,7 @@ import org.locationtech.jts.geom.Point; /** InfluxDB Source for weather data */ -public class InfluxDbWeatherSource implements WeatherSource { +public class InfluxDbWeatherSource extends WeatherSource { private static final String BASIC_QUERY_STRING = "Select * from weather"; private static final String WHERE = " where "; private static final String AND = " and "; @@ -34,24 +35,21 @@ public class InfluxDbWeatherSource implements WeatherSource { private static final int 
MILLI_TO_NANO_FACTOR = 1000000; private final InfluxDbConnector connector; - private final IdCoordinateSource coordinateSource; - private final TimeBasedWeatherValueFactory weatherValueFactory; /** * Initializes a new InfluxDbWeatherSource * * @param connector needed for database connection - * @param coordinateSource needed to map coordinates to ID as InfluxDB does not support spatial + * @param idCoordinateSource needed to map coordinates to ID as InfluxDB does not support spatial * types * @param weatherValueFactory instance of a time based weather value factory */ public InfluxDbWeatherSource( InfluxDbConnector connector, - IdCoordinateSource coordinateSource, + IdCoordinateSource idCoordinateSource, TimeBasedWeatherValueFactory weatherValueFactory) { + super(idCoordinateSource, weatherValueFactory); this.connector = connector; - this.coordinateSource = coordinateSource; - this.weatherValueFactory = weatherValueFactory; } @Override @@ -82,7 +80,7 @@ public Map> getWeather( ClosedInterval timeInterval, Collection coordinates) { if (coordinates == null) return getWeather(timeInterval); Map> coordinatesToId = - coordinates.stream().collect(Collectors.toMap(point -> point, coordinateSource::getId)); + coordinates.stream().collect(Collectors.toMap(point -> point, idCoordinateSource::getId)); HashMap> coordinateToTimeSeries = new HashMap<>(); try (InfluxDB session = connector.getSession()) { for (Map.Entry> entry : coordinatesToId.entrySet()) { @@ -104,6 +102,19 @@ public Map> getWeather( return coordinateToTimeSeries; } + @Override + public Optional> getWeather(ZonedDateTime date, Point coordinate) { + Optional coordinateId = idCoordinateSource.getId(coordinate); + if (coordinateId.isEmpty()) { + return Optional.empty(); + } + try (InfluxDB session = connector.getSession()) { + String query = createQueryStringForCoordinateAndTime(date, coordinateId.get()); + QueryResult queryResult = session.query(new Query(query)); + return 
filterEmptyOptionals(optTimeBasedValueStream(queryResult)).findFirst(); + } + } + /** * Return the weather for the given time interval AND coordinate * @@ -113,7 +124,7 @@ public Map> getWeather( */ public IndividualTimeSeries getWeather( ClosedInterval timeInterval, Point coordinate) { - Optional coordinateId = coordinateSource.getId(coordinate); + Optional coordinateId = idCoordinateSource.getId(coordinate); if (coordinateId.isEmpty()) { return new IndividualTimeSeries<>(UUID.randomUUID(), Collections.emptySet()); } @@ -128,18 +139,7 @@ public IndividualTimeSeries getWeather( } } - @Override - public Optional> getWeather(ZonedDateTime date, Point coordinate) { - Optional coordinateId = coordinateSource.getId(coordinate); - if (coordinateId.isEmpty()) { - return Optional.empty(); - } - try (InfluxDB session = connector.getSession()) { - String query = createQueryStringForCoordinateAndTime(date, coordinateId.get()); - QueryResult queryResult = session.query(new Query(query)); - return filterEmptyOptionals(optTimeBasedValueStream(queryResult)).findFirst(); - } - } + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- /** * Parses an influxQL QueryResult and then transforms it into a Stream of optional @@ -150,7 +150,7 @@ private Stream>> optTimeBasedValueStream( QueryResult queryResult) { Map>> measurementsMap = InfluxDbConnector.parseQueryResult(queryResult, MEASUREMENT_NAME_WEATHER); - final String coordinateIdFieldName = weatherValueFactory.getCoordinateIdFieldString(); + final String coordinateIdFieldName = weatherFactory.getCoordinateIdFieldString(); return measurementsMap.get(MEASUREMENT_NAME_WEATHER).stream() .map( fieldToValue -> { @@ -168,10 +168,11 @@ private Stream>> optTimeBasedValueStream( /* Get the corresponding coordinate id from map AND REMOVE THE ENTRY !!! 
*/ int coordinateId = Integer.parseInt(flatCaseFields.remove(coordinateIdFieldName)); - return coordinateSource + return idCoordinateSource .getCoordinate(coordinateId) .map(point -> new TimeBasedWeatherValueData(flatCaseFields, point)) - .flatMap(weatherValueFactory::get); + .map(weatherFactory::get) + .flatMap(Try::getData); }); } @@ -197,17 +198,17 @@ private String createQueryStringForTimeInterval(ClosedInterval ti } private String createTimeConstraint(ClosedInterval timeInterval) { - return weatherValueFactory.getTimeFieldString() + return weatherFactory.getTimeFieldString() + " >= " + timeInterval.getLower().toInstant().toEpochMilli() * MILLI_TO_NANO_FACTOR + AND - + weatherValueFactory.getTimeFieldString() + + weatherFactory.getTimeFieldString() + " <= " + timeInterval.getUpper().toInstant().toEpochMilli() * MILLI_TO_NANO_FACTOR; } private String createTimeConstraint(ZonedDateTime date) { - return weatherValueFactory.getTimeFieldString() + return weatherFactory.getTimeFieldString() + "=" + date.toInstant().toEpochMilli() * MILLI_TO_NANO_FACTOR; } diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlDataSource.java index 08f8a959b..9cd4e413f 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlDataSource.java @@ -7,22 +7,32 @@ import edu.ie3.datamodel.exceptions.InvalidColumnNameException; import edu.ie3.datamodel.io.connectors.SqlConnector; +import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy; +import edu.ie3.datamodel.io.source.DataSource; +import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.util.StringUtils; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.*; +import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public abstract class SqlDataSource { +/** Contains all functions that are needed 
to read a SQL data source. */ +public class SqlDataSource implements DataSource { protected static final Logger log = LoggerFactory.getLogger(SqlDataSource.class); - private final SqlConnector connector; + protected final SqlConnector connector; + protected final DatabaseNamingStrategy databaseNamingStrategy; + protected String schemaName; - protected SqlDataSource(SqlConnector connector) { + public SqlDataSource( + SqlConnector connector, String schemaName, DatabaseNamingStrategy databaseNamingStrategy) { this.connector = connector; + this.schemaName = schemaName; + this.databaseNamingStrategy = databaseNamingStrategy; } /** @@ -34,7 +44,7 @@ protected SqlDataSource(SqlConnector connector) { * @return basic query string without semicolon */ protected static String createBaseQueryString(String schemaName, String tableName) { - return "SELECT * FROM " + schemaName + ".\"" + tableName + "\""; + return "SELECT * FROM " + schemaName + "." + tableName; } /** @@ -98,6 +108,12 @@ protected List getDbTables(String schemaPattern, String tableNamePattern return tableNames; } + @Override + public Stream> getSourceData(Class entityClass) { + String explicitTableName = databaseNamingStrategy.getEntityName(entityClass).orElseThrow(); + return buildStreamByTableName(explicitTableName); + } + /** * Interface for anonymous functions that are used as a parameter for {@link #executeQuery}. * @@ -117,34 +133,41 @@ interface AddParams { } /** - * Executes the prepared statement after possibly adding parameters to the query using the given - * function. Finally, processes the results and creates a list of time based values via field map - * extraction. - * - * @param query the query to use - * @param addParams function that possibly adds parameters to query - * @return a list of resulting entities + * Creates a stream with maps representing a data point in the SQL data source using an entity + * class. 
*/ - protected List executeQuery(String query, AddParams addParams) { + protected Stream> buildStreamByEntityClass( + Class entityClass, AddParams addParams) { + String query = createBaseQueryString(schemaName, entityClass.getSimpleName()); + return executeQuery(query, addParams); + } + + /** + * Creates a stream with maps representing a data point in the SQL data source using an explicit + * table name. + */ + protected Stream> buildStreamByTableName(String tableName) { + String query = createBaseQueryString(schemaName, tableName); + return executeQuery(query); + } + + /** + * Creates a stream with maps representing a data point in the SQL data source using an explicit + * table name. + */ + protected Stream> executeQuery(String query, AddParams addParams) { try (PreparedStatement ps = connector.getConnection().prepareStatement(query)) { addParams.addParams(ps); ResultSet resultSet = ps.executeQuery(); - List> fieldMaps = connector.extractFieldMaps(resultSet); - - return fieldMaps.stream().map(this::createEntity).flatMap(Optional::stream).toList(); + return connector.extractFieldMaps(resultSet).stream(); } catch (SQLException e) { log.error("Error during execution of query {}", query, e); } - - return Collections.emptyList(); + return Stream.empty(); } - /** - * Instantiates an entity produced by this source given the required field value map. - * - * @param fieldToValues map of fields to their respective values - * @return the entity if instantiation succeeds - */ - protected abstract Optional createEntity(Map fieldToValues); + protected Stream> executeQuery(String query) { + return executeQuery(query, x -> {}); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlIdCoordinateSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlIdCoordinateSource.java new file mode 100644 index 000000000..ee56555cd --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlIdCoordinateSource.java @@ -0,0 +1,277 @@ +/* + * © 2022. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.sql; + +import static edu.ie3.datamodel.io.source.sql.SqlDataSource.createBaseQueryString; + +import edu.ie3.datamodel.io.connectors.SqlConnector; +import edu.ie3.datamodel.io.factory.SimpleFactoryData; +import edu.ie3.datamodel.io.factory.timeseries.SqlIdCoordinateFactory; +import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy; +import edu.ie3.datamodel.io.source.IdCoordinateSource; +import edu.ie3.datamodel.models.value.CoordinateValue; +import edu.ie3.util.geo.CoordinateDistance; +import edu.ie3.util.geo.GeoUtils; +import java.sql.Array; +import java.sql.PreparedStatement; +import java.util.*; +import javax.measure.quantity.Length; +import org.apache.commons.lang3.tuple.Pair; +import org.locationtech.jts.geom.Envelope; +import org.locationtech.jts.geom.Point; +import tech.units.indriya.ComparableQuantity; + +/** SQL source for coordinate data */ +public class SqlIdCoordinateSource implements IdCoordinateSource { + private static final String WHERE = " WHERE "; + + /** + * Queries that are available within this source. Motivation to have them as field value is to + * avoid creating a new string each time, bc they're always the same. 
+ */ + private final String basicQuery; + + private final String queryForPoint; + private final String queryForPoints; + private final String queryForId; + private final String queryForBoundingBox; + private final String queryForNearestPoints; + + private final SqlDataSource dataSource; + + private final SqlIdCoordinateFactory factory; + + public SqlIdCoordinateSource( + SqlIdCoordinateFactory factory, String coordinateTableName, SqlDataSource dataSource) { + this.factory = factory; + this.dataSource = dataSource; + + String dbIdColumnName = dataSource.getDbColumnName(factory.getIdField(), coordinateTableName); + String dbPointColumnName = + dataSource.getDbColumnName(factory.getCoordinateField(), coordinateTableName); + + // setup queries + this.basicQuery = createBaseQueryString(dataSource.schemaName, coordinateTableName); + this.queryForPoint = createQueryForPoint(dbIdColumnName); + this.queryForPoints = createQueryForPoints(dbIdColumnName); + this.queryForId = createQueryForId(dbPointColumnName); + this.queryForBoundingBox = createQueryForBoundingBox(dbPointColumnName); + this.queryForNearestPoints = + createQueryForNearestPoints( + dataSource.schemaName, coordinateTableName, dbIdColumnName, dbPointColumnName); + } + + /** + * Initializes a new SqlIdCoordinateSource + * + * @param connector the connector needed for the database connection + * @param schemaName the database schema to use + * @param coordinateTableName the name of the table containing coordinate data + * @param factory instance of a coordinate factory + */ + public SqlIdCoordinateSource( + SqlConnector connector, + String schemaName, + String coordinateTableName, + SqlIdCoordinateFactory factory) { + this( + factory, + coordinateTableName, + new SqlDataSource(connector, schemaName, new DatabaseNamingStrategy())); + } + + @Override + public Optional getCoordinate(int id) { + List values = executeQueryToList(queryForPoint, ps -> ps.setInt(1, id)); + + if (values.isEmpty()) { + return 
Optional.empty(); + } else { + return Optional.of(values.get(0).coordinate); + } + } + + @Override + public Collection getCoordinates(int... ids) { + Object[] idSet = Arrays.stream(ids).boxed().distinct().toArray(); + + List values = + executeQueryToList( + queryForPoints, + ps -> { + Array sqlArray = ps.getConnection().createArrayOf("int", idSet); + ps.setArray(1, sqlArray); + }); + + return values.stream().map(value -> value.coordinate).toList(); + } + + @Override + public Optional getId(Point coordinate) { + double latitude = coordinate.getY(); + double longitude = coordinate.getX(); + + List values = + executeQueryToList( + queryForId, + ps -> { + ps.setDouble(1, longitude); + ps.setDouble(2, latitude); + }); + + if (values.isEmpty()) { + return Optional.empty(); + } else { + return Optional.of(values.get(0).id); + } + } + + @Override + public Collection getAllCoordinates() { + List values = executeQueryToList(basicQuery + ";", PreparedStatement::execute); + + return values.stream().map(value -> value.coordinate).toList(); + } + + @Override + public List getNearestCoordinates(Point coordinate, int n) { + List values = + executeQueryToList( + queryForNearestPoints, + ps -> { + ps.setDouble(1, coordinate.getX()); + ps.setDouble(2, coordinate.getY()); + ps.setInt(3, n); + }); + + List points = values.stream().map(value -> value.coordinate).toList(); + return calculateCoordinateDistances(coordinate, n, points); + } + + @Override + public List getClosestCoordinates( + Point coordinate, int n, ComparableQuantity distance) { + Envelope envelope = GeoUtils.calculateBoundingBox(coordinate, distance); + + List values = + executeQueryToList( + queryForBoundingBox, + ps -> { + ps.setDouble(1, envelope.getMinX()); + ps.setDouble(2, envelope.getMinY()); + ps.setDouble(3, envelope.getMaxX()); + ps.setDouble(4, envelope.getMaxY()); + }); + + List points = values.stream().map(value -> value.coordinate).toList(); + + return calculateCoordinateDistances(coordinate, n, points); + 
} + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + private Optional createCoordinateValue(Map fieldToValues) { + fieldToValues.remove("distance"); + + SimpleFactoryData simpleFactoryData = new SimpleFactoryData(fieldToValues, Pair.class); + Optional> pair = factory.get(simpleFactoryData).getData(); + + if (pair.isEmpty()) { + return Optional.empty(); + } else { + Pair data = pair.get(); + return Optional.of(new CoordinateValue(data.getKey(), data.getValue())); + } + } + + private List executeQueryToList( + String query, SqlDataSource.AddParams addParams) { + return dataSource + .executeQuery(query, addParams) + .map(this::createCoordinateValue) + .flatMap(Optional::stream) + .toList(); + } + + /** + * Creates a basic query to retrieve entries for given ids with the following pattern:
+ * {@code WHERE =?;} + * + * @param idColumn the name of the column holding the id info + * @return the query string + */ + private String createQueryForPoint(String idColumn) { + return basicQuery + WHERE + idColumn + " =?; "; + } + + /** + * Creates a basic query to retrieve entries for given ids with the following pattern:
+ * {@code WHERE = ANY (?);} + * + * @param idColumn the name of the column holding the id info + * @return the query string + */ + private String createQueryForPoints(String idColumn) { + return basicQuery + WHERE + idColumn + " = ANY (?); "; + } + + /** + * Creates a basic query to retrieve an id for a given point with the following pattern:
+   * {@code WHERE = ST_Point( ?, ?);}
+   *
+   * @param pointColumn the name of the column holding the geometry information
+   * @return the query string
+   */
+  private String createQueryForId(String pointColumn) {
+    return basicQuery + WHERE + pointColumn + " = ST_Point( ?, ?); ";
+  }
+
+  /**
+   * Creates a basic query to retrieve all entries in a given box. The box is defined by a latitude
+   * interval and a longitude interval. The intervals are provided via an envelope. The pattern
+   * looks like this:
+ * {@code WHERE ST_Intersects(ST_MakeEnvelope(?, ?, ?, ?, 4326 ) , ) + * ;} + * + * @param pointColumn the name of the column holding the geometry information + * @return the query string + */ + private String createQueryForBoundingBox(String pointColumn) { + return basicQuery + + WHERE + + " ST_Intersects(ST_MakeEnvelope(?, ?, ?, ?, 4326 ) , " + + pointColumn + + ");"; + } + + /** + * Creates a query to retrieve the nearest n entries. The pattern looks like this:
+ * {@code SELECT AS id, AS coordinate, <-> + * ST_Point( ?, ?) AS distance FROM . ORDER BY distance LIMIT ?;} + * + * @param schemaName the name of the database schema + * @param tableName the name of the database table + * @param idColumn the name of the column holding the id information + * @param pointColumn the name of the column holding the geometry information + * @return the query string + */ + private String createQueryForNearestPoints( + String schemaName, String tableName, String idColumn, String pointColumn) { + return "SELECT " + + idColumn + + " AS id , " + + pointColumn + + " AS coordinate, " + + pointColumn + + " <-> ST_Point( ?, ?) AS distance " + + "FROM " + + schemaName + + ".\"" + + tableName + + "\"" + + " ORDER BY distance LIMIT ?;"; + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMappingSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMappingSource.java index f49a8aa4e..65f65f436 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMappingSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMappingSource.java @@ -5,61 +5,36 @@ */ package edu.ie3.datamodel.io.source.sql; +import static edu.ie3.datamodel.io.source.sql.SqlDataSource.createBaseQueryString; + import edu.ie3.datamodel.io.connectors.SqlConnector; -import edu.ie3.datamodel.io.factory.SimpleEntityData; -import edu.ie3.datamodel.io.factory.timeseries.TimeSeriesMappingFactory; +import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy; import edu.ie3.datamodel.io.naming.EntityPersistenceNamingStrategy; import edu.ie3.datamodel.io.source.TimeSeriesMappingSource; import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.stream.Collectors; - -public class SqlTimeSeriesMappingSource - extends SqlDataSource - implements TimeSeriesMappingSource { - private static final TimeSeriesMappingFactory mappingFactory = new TimeSeriesMappingFactory(); +import 
java.util.stream.Stream; +public class SqlTimeSeriesMappingSource extends TimeSeriesMappingSource { private final EntityPersistenceNamingStrategy entityPersistenceNamingStrategy; private final String queryFull; - private final String schemaName; + private final SqlDataSource dataSource; public SqlTimeSeriesMappingSource( SqlConnector connector, String schemaName, EntityPersistenceNamingStrategy entityPersistenceNamingStrategy) { - super(connector); + this.dataSource = + new SqlDataSource( + connector, schemaName, new DatabaseNamingStrategy(entityPersistenceNamingStrategy)); this.entityPersistenceNamingStrategy = entityPersistenceNamingStrategy; final String tableName = entityPersistenceNamingStrategy.getEntityName(MappingEntry.class).orElseThrow(); this.queryFull = createBaseQueryString(schemaName, tableName); - - this.schemaName = schemaName; - } - - @Override - public Map getMapping() { - return executeQuery(queryFull, ps -> {}).stream() - .collect(Collectors.toMap(MappingEntry::getParticipant, MappingEntry::getTimeSeries)); - } - - /** - * @deprecated since 3.0. 
Use {@link - * SqlTimeSeriesMetaInformationSource#getTimeSeriesMetaInformation()} instead - */ - @Override - @Deprecated(since = "3.0", forRemoval = true) - public Optional - getTimeSeriesMetaInformation(UUID timeSeriesUuid) { - return getDbTables(schemaName, "%" + timeSeriesUuid.toString()).stream() - .findFirst() - .map(entityPersistenceNamingStrategy::extractIndividualTimesSeriesMetaInformation); } @Override - protected Optional createEntity(Map fieldToValues) { - SimpleEntityData entityData = new SimpleEntityData(fieldToValues, MappingEntry.class); - return mappingFactory.get(entityData); + public Stream> getMappingSourceData() { + return dataSource.executeQuery(queryFull); } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java index f70a5e94f..ad01ebe62 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java @@ -20,9 +20,7 @@ import java.util.stream.Collectors; /** SQL implementation for retrieving {@link TimeSeriesMetaInformationSource} from the SQL scheme */ -public class SqlTimeSeriesMetaInformationSource - extends SqlDataSource - implements TimeSeriesMetaInformationSource { +public class SqlTimeSeriesMetaInformationSource implements TimeSeriesMetaInformationSource { private static final TimeSeriesMetaInformationFactory mappingFactory = new TimeSeriesMetaInformationFactory(); @@ -30,15 +28,20 @@ public class SqlTimeSeriesMetaInformationSource private final DatabaseNamingStrategy namingStrategy; private final Map mapping; + private final SqlDataSource dataSource; + public SqlTimeSeriesMetaInformationSource( - SqlConnector connector, String schemaName, DatabaseNamingStrategy namingStrategy) { - super(connector); - this.namingStrategy = namingStrategy; + SqlConnector connector, String 
schemaName, DatabaseNamingStrategy databaseNamingStrategy) { + this.dataSource = new SqlDataSource(connector, schemaName, databaseNamingStrategy); + this.namingStrategy = databaseNamingStrategy; String queryComplete = createQueryComplete(schemaName); this.mapping = - executeQuery(queryComplete, ps -> {}).stream() + dataSource + .executeQuery(queryComplete) + .map(this::createEntity) + .flatMap(Optional::stream) .collect( Collectors.toMap( IndividualTimeSeriesMetaInformation::getUuid, Function.identity())); @@ -58,7 +61,7 @@ private String createQueryComplete(String schemaName) { namingStrategy::getTimeSeriesEntityName, columnScheme -> columnScheme)); Iterable selectQueries = - getDbTables(schemaName, namingStrategy.getTimeSeriesPrefix() + "%").stream() + dataSource.getDbTables(schemaName, namingStrategy.getTimeSeriesPrefix() + "%").stream() .map( tableName -> Optional.ofNullable(acceptedTableNames.get(tableName)) @@ -87,11 +90,10 @@ public Optional getTimeSeriesMetaInformatio return Optional.ofNullable(this.mapping.get(timeSeriesUuid)); } - @Override - protected Optional createEntity( + private Optional createEntity( Map fieldToValues) { SimpleEntityData entityData = new SimpleEntityData(fieldToValues, IndividualTimeSeriesMetaInformation.class); - return mappingFactory.get(entityData); + return mappingFactory.get(entityData).getData(); } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesSource.java index 71511a069..126aefae5 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesSource.java @@ -5,9 +5,10 @@ */ package edu.ie3.datamodel.io.source.sql; +import static edu.ie3.datamodel.io.source.sql.SqlDataSource.createBaseQueryString; + import edu.ie3.datamodel.exceptions.SourceException; import edu.ie3.datamodel.io.connectors.SqlConnector; -import 
edu.ie3.datamodel.io.factory.timeseries.SimpleTimeBasedValueData; import edu.ie3.datamodel.io.factory.timeseries.TimeBasedSimpleValueFactory; import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy; import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; @@ -21,15 +22,20 @@ import java.sql.Timestamp; import java.time.ZonedDateTime; import java.util.*; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -public class SqlTimeSeriesSource extends SqlDataSource> - implements TimeSeriesSource { - private static final String WHERE = " WHERE "; - private static final String TIME_SERIES = "time_series"; +public class SqlTimeSeriesSource extends TimeSeriesSource { + + protected static final Logger log = LoggerFactory.getLogger(SqlTimeSeriesSource.class); + private final SqlDataSource dataSource; private final UUID timeSeriesUuid; - private final Class valueClass; - private final TimeBasedSimpleValueFactory valueFactory; + + // General fields + private static final String WHERE = " WHERE "; + private static final String TIME_SERIES = "time_series"; /** * Queries that are available within this source. 
Motivation to have them as field value is to @@ -40,6 +46,56 @@ public class SqlTimeSeriesSource extends SqlDataSource valueClass, + TimeBasedSimpleValueFactory factory) { + super(valueClass, factory); + this.dataSource = sqlDataSource; + + this.timeSeriesUuid = timeSeriesUuid; + + this.valueClass = valueClass; + this.valueFactory = factory; + + final ColumnScheme columnScheme = ColumnScheme.parse(valueClass).orElseThrow(); + final String tableName = + sqlDataSource.databaseNamingStrategy.getTimeSeriesEntityName(columnScheme); + + String dbTimeColumnName = + sqlDataSource.getDbColumnName(factory.getTimeFieldString(), tableName); + + this.queryFull = createQueryFull(sqlDataSource.schemaName, tableName); + this.queryTimeInterval = + createQueryForTimeInterval(sqlDataSource.schemaName, tableName, dbTimeColumnName); + this.queryTime = createQueryForTime(sqlDataSource.schemaName, tableName, dbTimeColumnName); + } + + /** + * Initializes a new SqlTimeSeriesSource + * + * @param connector the connector needed for database connection + * @param schemaName the database schema to use + * @param namingStrategy the naming strategy for database entities + * @param timeSeriesUuid the uuid of the time series + * @param valueClass the class of returned time series values + * @param factory a factory that parses the input data + */ + public SqlTimeSeriesSource( + SqlConnector connector, + String schemaName, + DatabaseNamingStrategy namingStrategy, + UUID timeSeriesUuid, + Class valueClass, + TimeBasedSimpleValueFactory factory) { + this( + new SqlDataSource(connector, schemaName, namingStrategy), + timeSeriesUuid, + valueClass, + factory); + } + /** * Factory method to build a source from given meta information * @@ -81,63 +137,46 @@ private static SqlTimeSeriesSource create( connector, schemaName, namingStrategy, timeSeriesUuid, valClass, valueFactory); } - /** - * Initializes a new SqlTimeSeriesSource - * - * @param connector the connector needed for database connection - * 
@param schemaName the database schema to use - * @param namingStrategy the naming strategy for database entities - * @param timeSeriesUuid the uuid of the time series - * @param valueClass the class of returned time series values - * @param factory a factory that parses the input data - */ - public SqlTimeSeriesSource( - SqlConnector connector, - String schemaName, - DatabaseNamingStrategy namingStrategy, - UUID timeSeriesUuid, - Class valueClass, - TimeBasedSimpleValueFactory factory) { - super(connector); - this.timeSeriesUuid = timeSeriesUuid; - this.valueClass = valueClass; - this.valueFactory = factory; - final ColumnScheme columnScheme = ColumnScheme.parse(valueClass).orElseThrow(); - final String tableName = namingStrategy.getTimeSeriesEntityName(columnScheme); - - String dbTimeColumnName = getDbColumnName(factory.getTimeFieldString(), tableName); - - this.queryFull = createQueryFull(schemaName, tableName); - this.queryTimeInterval = createQueryForTimeInterval(schemaName, tableName, dbTimeColumnName); - this.queryTime = createQueryForTime(schemaName, tableName, dbTimeColumnName); - } + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- @Override public IndividualTimeSeries getTimeSeries() { - List> timeBasedValues = executeQuery(queryFull, ps -> {}); - return new IndividualTimeSeries<>(timeSeriesUuid, new HashSet<>(timeBasedValues)); + Set> timeBasedValues = getTimeBasedValueSet(queryFull, ps -> {}); + return new IndividualTimeSeries<>(timeSeriesUuid, timeBasedValues); } @Override public IndividualTimeSeries getTimeSeries(ClosedInterval timeInterval) { - List> timeBasedValues = - executeQuery( + Set> timeBasedValues = + getTimeBasedValueSet( queryTimeInterval, ps -> { ps.setTimestamp(1, Timestamp.from(timeInterval.getLower().toInstant())); ps.setTimestamp(2, Timestamp.from(timeInterval.getUpper().toInstant())); }); - return new IndividualTimeSeries<>(timeSeriesUuid, new HashSet<>(timeBasedValues)); + return new 
IndividualTimeSeries<>(timeSeriesUuid, timeBasedValues); } @Override public Optional getValue(ZonedDateTime time) { - List> timeBasedValues = - executeQuery(queryTime, ps -> ps.setTimestamp(1, Timestamp.from(time.toInstant()))); + Set> timeBasedValues = + getTimeBasedValueSet(queryTime, ps -> ps.setTimestamp(1, Timestamp.from(time.toInstant()))); if (timeBasedValues.isEmpty()) return Optional.empty(); if (timeBasedValues.size() > 1) log.warn("Retrieved more than one result value, using the first"); - return Optional.of(timeBasedValues.get(0).getValue()); + return Optional.of(timeBasedValues.stream().toList().get(0).getValue()); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + /** Creates a set of TimeBasedValues from database */ + private Set> getTimeBasedValueSet( + String query, SqlDataSource.AddParams addParams) { + return dataSource + .executeQuery(query, addParams) + .map(this::createEntity) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); } /** @@ -147,11 +186,9 @@ public Optional getValue(ZonedDateTime time) { * @param fieldToValues Mapping from field id to values * @return Optional simple time based value */ - protected Optional> createEntity(Map fieldToValues) { + private Optional> createEntity(Map fieldToValues) { fieldToValues.remove("timeSeries"); - SimpleTimeBasedValueData factoryData = - new SimpleTimeBasedValueData<>(fieldToValues, valueClass); - return valueFactory.get(factoryData); + return createTimeBasedValue(fieldToValues).getData(); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlWeatherSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlWeatherSource.java index ab38ac5f0..d2aab44e1 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlWeatherSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlWeatherSource.java @@ -5,9 +5,12 @@ */ package edu.ie3.datamodel.io.source.sql; +import static 
edu.ie3.datamodel.io.source.sql.SqlDataSource.createBaseQueryString; + +import edu.ie3.datamodel.exceptions.SourceException; import edu.ie3.datamodel.io.connectors.SqlConnector; -import edu.ie3.datamodel.io.factory.timeseries.TimeBasedWeatherValueData; import edu.ie3.datamodel.io.factory.timeseries.TimeBasedWeatherValueFactory; +import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy; import edu.ie3.datamodel.io.source.IdCoordinateSource; import edu.ie3.datamodel.io.source.WeatherSource; import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; @@ -21,13 +24,12 @@ import org.locationtech.jts.geom.Point; /** SQL source for weather data */ -public class SqlWeatherSource extends SqlDataSource> - implements WeatherSource { - private static final String WHERE = " WHERE "; +public class SqlWeatherSource extends WeatherSource { + + private final SqlDataSource dataSource; - private final IdCoordinateSource idCoordinateSource; + private static final String WHERE = " WHERE "; private final String factoryCoordinateFieldName; - private final TimeBasedWeatherValueFactory weatherFactory; /** * Queries that are available within this source. 
Motivation to have them as field value is to @@ -53,14 +55,14 @@ public SqlWeatherSource( String schemaName, String weatherTableName, TimeBasedWeatherValueFactory weatherFactory) { - super(connector); - this.idCoordinateSource = idCoordinateSource; - this.weatherFactory = weatherFactory; + super(idCoordinateSource, weatherFactory); this.factoryCoordinateFieldName = weatherFactory.getCoordinateIdFieldString(); + this.dataSource = new SqlDataSource(connector, schemaName, new DatabaseNamingStrategy()); String dbTimeColumnName = - getDbColumnName(weatherFactory.getTimeFieldString(), weatherTableName); - String dbCoordinateIdColumnName = getDbColumnName(factoryCoordinateFieldName, weatherTableName); + dataSource.getDbColumnName(weatherFactory.getTimeFieldString(), weatherTableName); + String dbCoordinateIdColumnName = + dataSource.getDbColumnName(factoryCoordinateFieldName, weatherTableName); // setup queries this.queryTimeInterval = @@ -75,20 +77,23 @@ public SqlWeatherSource( @Override public Map> getWeather( - ClosedInterval timeInterval) { + ClosedInterval timeInterval) throws SourceException { List> timeBasedValues = - executeQuery( - queryTimeInterval, - ps -> { - ps.setTimestamp(1, Timestamp.from(timeInterval.getLower().toInstant())); - ps.setTimestamp(2, Timestamp.from(timeInterval.getUpper().toInstant())); - }); + buildTimeBasedValues( + weatherFactory, + dataSource.executeQuery( + queryTimeInterval, + ps -> { + ps.setTimestamp(1, Timestamp.from(timeInterval.getLower().toInstant())); + ps.setTimestamp(2, Timestamp.from(timeInterval.getUpper().toInstant())); + })); return mapWeatherValuesToPoints(timeBasedValues); } @Override public Map> getWeather( - ClosedInterval timeInterval, Collection coordinates) { + ClosedInterval timeInterval, Collection coordinates) + throws SourceException { Set coordinateIds = coordinates.stream() .map(idCoordinateSource::getId) @@ -100,21 +105,24 @@ public Map> getWeather( } List> timeBasedValues = - executeQuery( - 
queryTimeIntervalAndCoordinates, - ps -> { - Array coordinateIdArr = - ps.getConnection().createArrayOf("integer", coordinateIds.toArray()); - ps.setArray(1, coordinateIdArr); - ps.setTimestamp(2, Timestamp.from(timeInterval.getLower().toInstant())); - ps.setTimestamp(3, Timestamp.from(timeInterval.getUpper().toInstant())); - }); + buildTimeBasedValues( + weatherFactory, + dataSource.executeQuery( + queryTimeIntervalAndCoordinates, + ps -> { + Array coordinateIdArr = + ps.getConnection().createArrayOf("integer", coordinateIds.toArray()); + ps.setArray(1, coordinateIdArr); + ps.setTimestamp(2, Timestamp.from(timeInterval.getLower().toInstant())); + ps.setTimestamp(3, Timestamp.from(timeInterval.getUpper().toInstant())); + })); return mapWeatherValuesToPoints(timeBasedValues); } @Override - public Optional> getWeather(ZonedDateTime date, Point coordinate) { + public Optional> getWeather(ZonedDateTime date, Point coordinate) + throws SourceException { Optional coordinateId = idCoordinateSource.getId(coordinate); if (coordinateId.isEmpty()) { log.warn("Unable to match coordinate {} to a coordinate ID", coordinate); @@ -122,12 +130,14 @@ public Optional> getWeather(ZonedDateTime date, Poi } List> timeBasedValues = - executeQuery( - queryTimeAndCoordinate, - ps -> { - ps.setInt(1, coordinateId.get()); - ps.setTimestamp(2, Timestamp.from(date.toInstant())); - }); + buildTimeBasedValues( + weatherFactory, + dataSource.executeQuery( + queryTimeAndCoordinate, + ps -> { + ps.setInt(1, coordinateId.get()); + ps.setTimestamp(2, Timestamp.from(date.toInstant())); + })); if (timeBasedValues.isEmpty()) return Optional.empty(); if (timeBasedValues.size() > 1) @@ -135,6 +145,8 @@ public Optional> getWeather(ZonedDateTime date, Poi return Optional.of(timeBasedValues.get(0)); } + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + /** * Creates a base query to retrieve all entities in the given time frame with the following * 
pattern:
@@ -200,62 +212,4 @@ private String createQueryStringForTimeIntervalAndCoordinates( + timeColumnName + " BETWEEN ? AND ?;"; } - - /** - * Converts a field to value map into a TimeBasedValue, removes the "tid" - * - * @param fieldMap the field to value map for one TimeBasedValue - * @return an Optional of that TimeBasedValue - */ - @Override - protected Optional> createEntity(Map fieldMap) { - fieldMap.remove("tid"); - Optional data = toTimeBasedWeatherValueData(fieldMap); - if (data.isEmpty()) return Optional.empty(); - return weatherFactory.get(data.get()); - } - - /** - * Converts a field to value map into TimeBasedWeatherValueData, extracts the coordinate id from - * the field map and uses the {@link IdCoordinateSource} to map it to a point - * - * @param fieldMap the field to value map for one TimeBasedValue - * @return the TimeBasedWeatherValueData - */ - private Optional toTimeBasedWeatherValueData( - Map fieldMap) { - String coordinateValue = fieldMap.remove(factoryCoordinateFieldName); - fieldMap.putIfAbsent("uuid", UUID.randomUUID().toString()); - int coordinateId = Integer.parseInt(coordinateValue); - Optional coordinate = idCoordinateSource.getCoordinate(coordinateId); - if (coordinate.isEmpty()) { - log.warn("Unable to match coordinate ID {} to a point", coordinateId); - return Optional.empty(); - } - return Optional.of(new TimeBasedWeatherValueData(fieldMap, coordinate.get())); - } - - /** - * Maps a collection of TimeBasedValues into time series for each contained coordinate point - * - * @param timeBasedValues the values to map - * @return a map of coordinate point to time series - */ - private Map> mapWeatherValuesToPoints( - Collection> timeBasedValues) { - Map>> coordinateToValues = - timeBasedValues.stream() - .collect( - Collectors.groupingBy( - timeBasedWeatherValue -> timeBasedWeatherValue.getValue().getCoordinate(), - Collectors.toSet())); - Map> coordinateToTimeSeries = new HashMap<>(); - for (Map.Entry>> entry : - 
coordinateToValues.entrySet()) { - Set> values = entry.getValue(); - IndividualTimeSeries timeSeries = new IndividualTimeSeries<>(null, values); - coordinateToTimeSeries.put(entry.getKey(), timeSeries); - } - return coordinateToTimeSeries; - } } diff --git a/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java b/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java index 030f9dc61..333056974 100644 --- a/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java +++ b/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java @@ -52,7 +52,7 @@ public String toString() { * @version 0.1 * @since 05.06.20 */ - protected abstract static class UniqueEntityCopyBuilder + public abstract static class UniqueEntityCopyBuilder implements UniqueEntityBuilder { private UUID uuid; @@ -61,16 +61,16 @@ protected UniqueEntityCopyBuilder(UniqueEntity entity) { this.uuid = entity.getUuid(); } - public T uuid(UUID uuid) { + public B uuid(UUID uuid) { this.uuid = uuid; - return childInstance(); + return thisInstance(); } protected UUID getUuid() { return uuid; } - protected abstract T childInstance(); + protected abstract B thisInstance(); } protected interface UniqueEntityBuilder { diff --git a/src/main/java/edu/ie3/datamodel/models/input/AssetInput.java b/src/main/java/edu/ie3/datamodel/models/input/AssetInput.java index 646ac245e..a0840fbcc 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/AssetInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/AssetInput.java @@ -58,7 +58,7 @@ public String getId() { return id; } - public abstract UniqueEntityBuilder copy(); + public abstract AssetInputCopyBuilder copy(); @Override public boolean equals(Object o) { @@ -95,8 +95,8 @@ public String toString() { * @version 0.1 * @since 05.06.20 */ - protected abstract static class AssetInputCopyBuilder> - extends UniqueEntityCopyBuilder { + public abstract static class AssetInputCopyBuilder> + extends UniqueEntityCopyBuilder { private String id; private OperatorInput operator; @@ 
-109,19 +109,19 @@ protected AssetInputCopyBuilder(AssetInput entity) { this.operationTime = entity.getOperationTime(); } - public T id(String id) { + public B id(String id) { this.id = id; - return childInstance(); + return thisInstance(); } - public T operator(OperatorInput operator) { + public B operator(OperatorInput operator) { this.operator = operator; - return childInstance(); + return thisInstance(); } - public T operationTime(OperationTime operationTime) { + public B operationTime(OperationTime operationTime) { this.operationTime = operationTime; - return childInstance(); + return thisInstance(); } protected String getId() { @@ -140,6 +140,6 @@ protected OperationTime getOperationTime() { public abstract AssetInput build(); @Override - protected abstract T childInstance(); + protected abstract B thisInstance(); } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/MeasurementUnitInput.java b/src/main/java/edu/ie3/datamodel/models/input/MeasurementUnitInput.java index 07688612d..11e90713c 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/MeasurementUnitInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/MeasurementUnitInput.java @@ -98,6 +98,7 @@ public boolean getQ() { return q; } + @Override public MeasurementUnitInputCopyBuilder copy() { return new MeasurementUnitInputCopyBuilder(this); } @@ -206,7 +207,7 @@ public MeasurementUnitInputCopyBuilder q(boolean q) { } @Override - protected MeasurementUnitInputCopyBuilder childInstance() { + protected MeasurementUnitInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/NodeInput.java b/src/main/java/edu/ie3/datamodel/models/input/NodeInput.java index b70a095b1..0366964e0 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/NodeInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/NodeInput.java @@ -115,6 +115,7 @@ public int getSubnet() { return subnet; } + @Override public NodeInputCopyBuilder copy() { return new 
NodeInputCopyBuilder(this); } @@ -226,7 +227,7 @@ public NodeInputCopyBuilder subnet(int subnet) { } @Override - protected NodeInputCopyBuilder childInstance() { + protected NodeInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/OperatorInput.java b/src/main/java/edu/ie3/datamodel/models/input/OperatorInput.java index b65422ae2..1fc2005cf 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/OperatorInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/OperatorInput.java @@ -83,7 +83,7 @@ public OperatorInputCopyBuilder id(String id) { } @Override - protected OperatorInputCopyBuilder childInstance() { + protected OperatorInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/ConnectorInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/ConnectorInput.java index dd7ed2028..cc5965af3 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/connector/ConnectorInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/ConnectorInput.java @@ -127,8 +127,8 @@ public String toString() { * @version 0.1 * @since 05.06.20 */ - abstract static class ConnectorInputCopyBuilder> - extends AssetInputCopyBuilder { + public abstract static class ConnectorInputCopyBuilder> + extends AssetInputCopyBuilder { private NodeInput nodeA; private NodeInput nodeB; @@ -141,19 +141,19 @@ protected ConnectorInputCopyBuilder(ConnectorInput entity) { this.parallelDevices = entity.getParallelDevices(); } - public T nodeA(NodeInput nodeA) { + public B nodeA(NodeInput nodeA) { this.nodeA = nodeA; - return childInstance(); + return thisInstance(); } - public T nodeB(NodeInput nodeB) { + public B nodeB(NodeInput nodeB) { this.nodeB = nodeB; - return childInstance(); + return thisInstance(); } - public T parallelDevices(int parallelDevices) { + public B parallelDevices(int parallelDevices) { this.parallelDevices = parallelDevices; - return 
childInstance(); + return thisInstance(); } protected NodeInput getNodeA() { @@ -172,6 +172,6 @@ protected int getParallelDevices() { public abstract ConnectorInput build(); @Override - protected abstract T childInstance(); + protected abstract B thisInstance(); } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/LineInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/LineInput.java index c5bbe27ed..2d77bd425 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/connector/LineInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/LineInput.java @@ -117,6 +117,7 @@ public OlmCharacteristicInput getOlmCharacteristic() { return olmCharacteristic; } + @Override public LineInputCopyBuilder copy() { return new LineInputCopyBuilder(this); } @@ -223,7 +224,7 @@ public LineInputCopyBuilder olmCharacteristic(OlmCharacteristicInput olmCharacte } @Override - protected LineInputCopyBuilder childInstance() { + protected LineInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/SwitchInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/SwitchInput.java index 6e36b1ba0..59762ea43 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/connector/SwitchInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/SwitchInput.java @@ -57,6 +57,7 @@ public boolean isClosed() { return closed; } + @Override public SwitchInputCopyBuilder copy() { return new SwitchInputCopyBuilder(this); } @@ -126,7 +127,7 @@ public SwitchInputCopyBuilder closed(boolean closed) { } @Override - protected SwitchInputCopyBuilder childInstance() { + protected SwitchInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/Transformer2WInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/Transformer2WInput.java index 99b0bc9ac..435f49a3a 100644 --- 
a/src/main/java/edu/ie3/datamodel/models/input/connector/Transformer2WInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/Transformer2WInput.java @@ -77,6 +77,7 @@ public Transformer2WInput( this.type = type; } + @Override public Transformer2WInputCopyBuilder copy() { return new Transformer2WInputCopyBuilder(this); } @@ -160,7 +161,7 @@ public Transformer2WInputCopyBuilder type(Transformer2WTypeInput type) { } @Override - protected Transformer2WInputCopyBuilder childInstance() { + protected Transformer2WInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/Transformer3WInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/Transformer3WInput.java index 54bafaa90..3e53dda9d 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/connector/Transformer3WInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/Transformer3WInput.java @@ -232,6 +232,7 @@ public NodeInput getNodeInternal() { return nodeInternal; } + @Override public Transformer3WInputCopyBuilder copy() { return new Transformer3WInputCopyBuilder(this); } @@ -335,7 +336,7 @@ public Transformer3WInputCopyBuilder internalSlack(boolean internalNodeIsSlack) } @Override - protected Transformer3WInputCopyBuilder childInstance() { + protected Transformer3WInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/TransformerInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/TransformerInput.java index cf0b42127..903bcf94d 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/connector/TransformerInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/TransformerInput.java @@ -81,7 +81,7 @@ public int getTapPos() { } @Override - public abstract TransformerInputCopyBuilder copy(); + public abstract TransformerInputCopyBuilder> copy(); @Override public boolean equals(Object o) { @@ -127,8 +127,8 @@ public 
String toString() { * @version 0.1 * @since 05.06.20 */ - abstract static class TransformerInputCopyBuilder> - extends ConnectorInputCopyBuilder { + public abstract static class TransformerInputCopyBuilder> + extends ConnectorInputCopyBuilder { private int tapPos; private boolean autoTap; @@ -139,14 +139,14 @@ protected TransformerInputCopyBuilder(TransformerInput entity) { this.autoTap = entity.isAutoTap(); } - public T tapPos(int tapPos) { + public B tapPos(int tapPos) { this.tapPos = tapPos; - return childInstance(); + return thisInstance(); } - public T autoTap(boolean autoTap) { + public B autoTap(boolean autoTap) { this.autoTap = autoTap; - return childInstance(); + return thisInstance(); } protected int getTapPos() { @@ -161,6 +161,6 @@ protected boolean isAutoTap() { public abstract TransformerInput build(); @Override - protected abstract T childInstance(); + protected abstract B thisInstance(); } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/type/Transformer2WTypeInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/type/Transformer2WTypeInput.java index fa2280c5f..319c4cf3f 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/connector/type/Transformer2WTypeInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/type/Transformer2WTypeInput.java @@ -18,7 +18,7 @@ public class Transformer2WTypeInput extends AssetTypeInput { private final ComparableQuantity rSc; /** Short circuit reactance (typically in Ohm) */ private final ComparableQuantity xSc; - /** Rated apparent power (typically in MVA) */ + /** Rated apparent power (typically in kVA) */ private final ComparableQuantity sRated; /** Rated voltage of the high voltage winding (typically in kV) */ private final ComparableQuantity vRatedA; @@ -46,7 +46,7 @@ public class Transformer2WTypeInput extends AssetTypeInput { * @param id of the type * @param rSc Short circuit resistance * @param xSc Short circuit reactance - * @param sRated Rated apparent 
power (typically in MVA) + * @param sRated Rated apparent power (typically in kVA) * @param vRatedA Rated voltage of the high voltage winding * @param vRatedB Rated voltage of the low voltage winding * @param gM Phase-to-ground conductance diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/type/Transformer3WTypeInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/type/Transformer3WTypeInput.java index 488e142c6..65c308071 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/connector/type/Transformer3WTypeInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/type/Transformer3WTypeInput.java @@ -14,11 +14,11 @@ /** Describes the type of a {@link edu.ie3.datamodel.models.input.connector.Transformer3WInput} */ public class Transformer3WTypeInput extends AssetTypeInput { - /** Rated apparent power of the high voltage winding (typically in MVA) */ + /** Rated apparent power of the high voltage winding (typically in kVA) */ private final ComparableQuantity sRatedA; // Hv - /** Rated apparent power of the medium voltage winding (typically in MVA) */ + /** Rated apparent power of the medium voltage winding (typically in kVA) */ private final ComparableQuantity sRatedB; // Mv - /** Rated apparent power of the low voltage windings (typically in MVA) */ + /** Rated apparent power of the low voltage windings (typically in kVA) */ private final ComparableQuantity sRatedC; // Lv /** Rated voltage magnitude of the high voltage winding (typically in kV) */ private final ComparableQuantity vRatedA; // Hv diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/GraphicElements.java b/src/main/java/edu/ie3/datamodel/models/input/container/GraphicElements.java index 10e610dfc..e18430cab 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/GraphicElements.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/GraphicElements.java @@ -67,6 +67,11 @@ public final List allEntitiesAsList() { return 
Collections.unmodifiableList(allEntities); } + @Override + public GraphicElementsCopyBuilder copy() { + return new GraphicElementsCopyBuilder(this); + } + /** @return unmodifiable Set of all node graphic data for this grid */ public Set getNodeGraphics() { return Collections.unmodifiableSet(nodeGraphics); @@ -88,4 +93,55 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(nodeGraphics, lineGraphics); } + + /** + * A builder pattern based approach to create copies of {@link GraphicElements} containers with + * altered field values. For detailed field descriptions refer to java docs of {@link + * GraphicElements} + * + * @version 3.1 + * @since 14.02.23 + */ + public static class GraphicElementsCopyBuilder + implements InputContainerCopyBuilder { + private Set nodeGraphics; + private Set lineGraphics; + + /** + * Constructor for {@link GraphicElementsCopyBuilder} + * + * @param graphicElements instance of {@link GraphicElements} + */ + protected GraphicElementsCopyBuilder(GraphicElements graphicElements) { + this.nodeGraphics = graphicElements.getNodeGraphics(); + this.lineGraphics = graphicElements.getLineGraphics(); + } + + /** + * Method to alter the {@link NodeGraphicInput}. + * + * @param nodeGraphics set of altered {@link NodeGraphicInput}'s + * @return this instance of {@link GraphicElementsCopyBuilder} + */ + public GraphicElementsCopyBuilder nodeGraphics(Set nodeGraphics) { + this.nodeGraphics = nodeGraphics; + return this; + } + + /** + * Method to alter the {@link LineGraphicInput}. 
+ * + * @param lineGraphics set of altered {@link LineGraphicInput}'s + * @return this instance of {@link GraphicElementsCopyBuilder} + */ + public GraphicElementsCopyBuilder lineGraphics(Set lineGraphics) { + this.lineGraphics = lineGraphics; + return this; + } + + @Override + public GraphicElements build() { + return new GraphicElements(nodeGraphics, lineGraphics); + } + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/GridContainer.java b/src/main/java/edu/ie3/datamodel/models/input/container/GridContainer.java index 6e39a628a..3c076cfff 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/GridContainer.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/GridContainer.java @@ -78,4 +78,98 @@ public int hashCode() { public String toString() { return "GridContainer{" + "gridName='" + gridName + '\'' + '}'; } + + /** + * Abstract class for all builder that build child containers of abstract class {@link + * GridContainer} + * + * @version 3.1 + * @since 14.02.23 + */ + protected abstract static class GridContainerCopyBuilder> + implements InputContainerCopyBuilder { + private String gridName; + private RawGridElements rawGrid; + private SystemParticipants systemParticipants; + private GraphicElements graphics; + + /** + * Constructor for {@link GridContainerCopyBuilder}. + * + * @param gridContainer instance of {@link GridContainerCopyBuilder} + */ + protected GridContainerCopyBuilder(GridContainer gridContainer) { + this.gridName = gridContainer.getGridName(); + this.rawGrid = gridContainer.getRawGrid(); + this.systemParticipants = gridContainer.getSystemParticipants(); + this.graphics = gridContainer.getGraphics(); + } + + /** Returns grid name */ + protected String getGridName() { + return gridName; + } + + /** Returns {@link RawGridElements}. 
*/ + protected RawGridElements getRawGrid() { + return rawGrid; + } + + /** Returns {@link SystemParticipants} */ + protected SystemParticipants getSystemParticipants() { + return systemParticipants; + } + + /** Returns {@link GraphicElements} */ + protected GraphicElements getGraphics() { + return graphics; + } + + /** + * Method to alter the grid name. + * + * @param gridName altered grid name + * @return this instance of {@link GridContainerCopyBuilder} + */ + public B gridName(String gridName) { + this.gridName = gridName; + return thisInstance(); + } + + /** + * Method to alter the {@link RawGridElements} + * + * @param rawGrid altered raw grid + * @return this instance of {@link GridContainerCopyBuilder} + */ + public B rawGrid(RawGridElements rawGrid) { + this.rawGrid = rawGrid; + return thisInstance(); + } + + /** + * Method to alter the {@link SystemParticipants}. + * + * @param systemParticipants altered systemParticipants + * @return this instance of {@link GridContainerCopyBuilder} + */ + public B systemParticipants(SystemParticipants systemParticipants) { + this.systemParticipants = systemParticipants; + return thisInstance(); + } + + /** + * Method to alter the {@link GraphicElements}. 
+ * + * @param graphics altered graphics + * @return this instance of {@link GridContainerCopyBuilder} + */ + public B graphics(GraphicElements graphics) { + this.graphics = graphics; + return thisInstance(); + } + + /** Returns the current instance of builder with the correct subclass type */ + protected abstract B thisInstance(); + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/InputContainer.java b/src/main/java/edu/ie3/datamodel/models/input/container/InputContainer.java index 4cf5a97de..82d12f152 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/InputContainer.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/InputContainer.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.models.input.container; +import edu.ie3.datamodel.exceptions.ValidationException; import edu.ie3.datamodel.models.input.InputEntity; import java.io.Serializable; import java.util.List; @@ -14,4 +15,20 @@ public interface InputContainer extends Serializable { /** @return unmodifiable List of all entities */ List allEntitiesAsList(); + + /** Returns an input container copy builder */ + InputContainerCopyBuilder copy(); + + /** + * Abstract class for all builder that build child containers of interface {@link + * edu.ie3.datamodel.models.input.container.InputContainer} + * + * @version 3.1 + * @since 14.02.23 + */ + interface InputContainerCopyBuilder { + + /** Returns the altered {@link InputContainer} */ + InputContainer build() throws ValidationException; + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/JointGridContainer.java b/src/main/java/edu/ie3/datamodel/models/input/container/JointGridContainer.java index 267f21cb3..5a15f0bfb 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/JointGridContainer.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/JointGridContainer.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.models.input.container; +import 
edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.graph.SubGridTopologyGraph; import edu.ie3.datamodel.utils.ContainerUtils; import java.util.Objects; @@ -22,7 +23,8 @@ public JointGridContainer( String gridName, RawGridElements rawGrid, SystemParticipants systemParticipants, - GraphicElements graphics) { + GraphicElements graphics) + throws InvalidGridException { super(gridName, rawGrid, systemParticipants, graphics); /* Build sub grid dependency */ @@ -76,4 +78,47 @@ public int hashCode() { public String toString() { return "JointGridContainer{" + "gridName='" + gridName + '\'' + '}'; } + + @Override + public JointGridContainerCopyBuilder copy() { + return new JointGridContainerCopyBuilder(this); + } + + /** + * A builder pattern based approach to create copies of {@link JointGridContainer} containers with + * altered field values. For detailed field descriptions refer to java docs of {@link + * JointGridContainer} + * + * @version 3.1 + * @since 14.02.23 + */ + public static class JointGridContainerCopyBuilder + extends GridContainerCopyBuilder { + private final SubGridTopologyGraph subGridTopologyGraph; + + /** + * Constructor for {@link JointGridContainerCopyBuilder} + * + * @param jointGridContainer instance of {@link JointGridContainer} + */ + protected JointGridContainerCopyBuilder(JointGridContainer jointGridContainer) { + super(jointGridContainer); + this.subGridTopologyGraph = jointGridContainer.getSubGridTopologyGraph(); + } + + @Override + protected JointGridContainerCopyBuilder thisInstance() { + return this; + } + + @Override + public JointGridContainer build() { + return new JointGridContainer( + getGridName(), + getRawGrid(), + getSystemParticipants(), + getGraphics(), + subGridTopologyGraph); + } + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/RawGridElements.java b/src/main/java/edu/ie3/datamodel/models/input/container/RawGridElements.java index cf94fd793..498ef1850 100644 --- 
a/src/main/java/edu/ie3/datamodel/models/input/container/RawGridElements.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/RawGridElements.java @@ -127,6 +127,11 @@ public final List allEntitiesAsList() { return Collections.unmodifiableList(allEntities); } + @Override + public RawGridElementsCopyBuilder copy() { + return new RawGridElementsCopyBuilder(this); + } + /** @return unmodifiable ; of all three winding transformers in this grid */ public Set getNodes() { return Collections.unmodifiableSet(nodes); @@ -173,4 +178,107 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(nodes, lines, transformer2Ws, transformer3Ws, switches, measurementUnits); } + + /** + * A builder pattern based approach to create copies of {@link RawGridElements} containers with + * altered field values. For detailed field descriptions refer to java docs of {@link + * RawGridElements} + * + * @version 3.1 + * @since 14.02.23 + */ + public static class RawGridElementsCopyBuilder implements InputContainerCopyBuilder { + private Set nodes; + private Set lines; + private Set transformer2Ws; + private Set transformer3Ws; + private Set switches; + private Set measurementUnits; + + /** + * Constructor for {@link RawGridElementsCopyBuilder} + * + * @param rawGridElements instance of {@link RawGridElementsCopyBuilder} + */ + protected RawGridElementsCopyBuilder(RawGridElements rawGridElements) { + this.nodes = rawGridElements.getNodes(); + this.lines = rawGridElements.getLines(); + this.transformer2Ws = rawGridElements.getTransformer2Ws(); + this.transformer3Ws = rawGridElements.getTransformer3Ws(); + this.switches = rawGridElements.getSwitches(); + this.measurementUnits = rawGridElements.getMeasurementUnits(); + } + + /** + * Method to alter {@link NodeInput} + * + * @param nodes set of altered nodes + * @return this instance of {@link RawGridElementsCopyBuilder} + */ + public RawGridElementsCopyBuilder nodes(Set nodes) { + this.nodes = nodes; + return 
this; + } + + /** + * Method to alter {@link LineInput} + * + * @param lines set of altered lines + * @return this instance of {@link RawGridElementsCopyBuilder} + */ + public RawGridElementsCopyBuilder lines(Set lines) { + this.lines = lines; + return this; + } + + /** + * Method to alter {@link Transformer2WInput} + * + * @param transformer2Ws set of altered two winding transformers + * @return this instance of {@link RawGridElementsCopyBuilder} + */ + public RawGridElementsCopyBuilder transformers2Ws(Set transformer2Ws) { + this.transformer2Ws = transformer2Ws; + return this; + } + + /** + * Method to alter {@link Transformer3WInput} + * + * @param transformer3Ws set of altered three winding trnasformers + * @return this instance of {@link RawGridElementsCopyBuilder} + */ + public RawGridElementsCopyBuilder transformer3Ws(Set transformer3Ws) { + this.transformer3Ws = transformer3Ws; + return this; + } + + /** + * Method to alter {@link SwitchInput} + * + * @param switches set of altered switches + * @return this instance of {@link RawGridElementsCopyBuilder} + */ + public RawGridElementsCopyBuilder switches(Set switches) { + this.switches = switches; + return this; + } + + /** + * Method to alter {@link MeasurementUnitInput} + * + * @param measurementUnits set of altered measurement units + * @return this instance of {@link RawGridElementsCopyBuilder} + */ + public RawGridElementsCopyBuilder measurementUnits(Set measurementUnits) { + this.measurementUnits = measurementUnits; + return this; + } + + @Override + public RawGridElements build() { + return new RawGridElements( + nodes, lines, transformer2Ws, transformer3Ws, switches, measurementUnits); + } + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/SubGridContainer.java b/src/main/java/edu/ie3/datamodel/models/input/container/SubGridContainer.java index a4b230790..2c05b187f 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/SubGridContainer.java +++ 
b/src/main/java/edu/ie3/datamodel/models/input/container/SubGridContainer.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.models.input.container; +import edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; import edu.ie3.datamodel.utils.ContainerUtils; import java.util.Objects; @@ -23,7 +24,8 @@ public SubGridContainer( int subnet, RawGridElements rawGrid, SystemParticipants systemParticipants, - GraphicElements graphics) { + GraphicElements graphics) + throws InvalidGridException { super(gridName, rawGrid, systemParticipants, graphics); this.subnet = subnet; this.predominantVoltageLevel = ContainerUtils.determinePredominantVoltLvl(rawGrid, subnet); @@ -63,4 +65,54 @@ public String toString() { + predominantVoltageLevel + '}'; } + + @Override + public SubGridContainerCopyBuilder copy() { + return new SubGridContainerCopyBuilder(this); + } + + /** + * A builder pattern based approach to create copies of {@link SubGridContainer} containers with + * altered field values. For detailed field descriptions refer to java docs of {@link + * SubGridContainer} + * + * @version 3.1 + * @since 14.02.23 + */ + public static class SubGridContainerCopyBuilder + extends GridContainerCopyBuilder { + private int subnet; + + /** + * Constructor for {@link SubGridContainerCopyBuilder} + * + * @param subGridContainer instance of {@link SubGridContainer} + */ + protected SubGridContainerCopyBuilder(SubGridContainer subGridContainer) { + super(subGridContainer); + this.subnet = subGridContainer.getSubnet(); + } + + /** + * Method to alter the subnet number. + * + * @param subnet altered subnet number. 
+ * @return this instance of {@link SubGridContainerCopyBuilder} + */ + public SubGridContainerCopyBuilder subnet(int subnet) { + this.subnet = subnet; + return thisInstance(); + } + + @Override + protected SubGridContainerCopyBuilder thisInstance() { + return this; + } + + @Override + public SubGridContainer build() throws InvalidGridException { + return new SubGridContainer( + getGridName(), subnet, getRawGrid(), getSystemParticipants(), getGraphics()); + } + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/SystemParticipants.java b/src/main/java/edu/ie3/datamodel/models/input/container/SystemParticipants.java index 9cf96d239..ae90f50c4 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/SystemParticipants.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/SystemParticipants.java @@ -186,6 +186,10 @@ public final List allEntitiesAsList() { return Collections.unmodifiableList(allEntities); } + public SystemParticipantsCopyBuilder copy() { + return new SystemParticipantsCopyBuilder(this); + } + /** @return unmodifiable Set of all biomass plants in this grid */ public Set getBmPlants() { return Collections.unmodifiableSet(bmPlants); @@ -270,4 +274,183 @@ public int hashCode() { storages, wecPlants); } + + /** + * A builder pattern based approach to create copies of {@link SystemParticipants} containers with + * altered field values. 
For detailed field descriptions refer to java docs of {@link + * SystemParticipants} + * + * @version 3.1 + * @since 14.02.23 + */ + public static class SystemParticipantsCopyBuilder + implements InputContainerCopyBuilder { + private Set bmPlants; + private Set chpPlants; + private Set evCS; + private Set evs; + private Set fixedFeedIns; + private Set heatPumps; + private Set loads; + private Set pvPlants; + private Set storages; + private Set wecPlants; + private Set emSystems; + + /** + * Constructor for {@link SystemParticipantsCopyBuilder} + * + * @param systemParticipants instance of {@link SystemParticipants} + */ + protected SystemParticipantsCopyBuilder(SystemParticipants systemParticipants) { + this.bmPlants = systemParticipants.bmPlants; + this.chpPlants = systemParticipants.chpPlants; + this.evCS = systemParticipants.evCS; + this.evs = systemParticipants.evs; + this.fixedFeedIns = systemParticipants.fixedFeedIns; + this.heatPumps = systemParticipants.heatPumps; + this.loads = systemParticipants.loads; + this.pvPlants = systemParticipants.pvPlants; + this.storages = systemParticipants.storages; + this.wecPlants = systemParticipants.wecPlants; + this.emSystems = systemParticipants.emSystems; + } + + /** + * Method to alter {@link BmInput} + * + * @param bmPlants set of altered biomass plants + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder bmPlants(Set bmPlants) { + this.bmPlants = bmPlants; + return this; + } + + /** + * Method to alter {@link ChpInput} + * + * @param chpPlants set of altered combined heat and power plants + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder chpPlants(Set chpPlants) { + this.chpPlants = chpPlants; + return this; + } + + /** + * Method to alter {@link EvcsInput} + * + * @param evCS set of altered biomass electric vehicle charging stations + * @return this instance of {@link SystemParticipantsCopyBuilder} 
+ */ + public SystemParticipantsCopyBuilder evCS(Set evCS) { + this.evCS = evCS; + return this; + } + + /** + * Method to alter {@link EvInput} + * + * @param evs set of altered electric vehicles + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder evs(Set evs) { + this.evs = evs; + return this; + } + + /** + * Method to alter {@link FixedFeedInInput} + * + * @param fixedFeedIns set of altered fixed feed in facilities + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder fixedFeedIn(Set fixedFeedIns) { + this.fixedFeedIns = fixedFeedIns; + return this; + } + + /** + * Method to alter {@link HpInput} + * + * @param heatPumps set of altered heat pumps + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder heatPumps(Set heatPumps) { + this.heatPumps = heatPumps; + return this; + } + + /** + * Method to alter {@link LoadInput} + * + * @param loads set of altered loads + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder loads(Set loads) { + this.loads = loads; + return this; + } + + /** + * Method to alter {@link PvInput} + * + * @param pvPlants set of altered photovoltaic power plants + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder pvPlants(Set pvPlants) { + this.pvPlants = pvPlants; + return this; + } + + /** + * Method to alter {@link StorageInput} + * + * @param storages set of altered electric energy storages + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder storages(Set storages) { + this.storages = storages; + return this; + } + + /** + * Method to alter {@link WecInput} + * + * @param wecPlants set of altered wind energy converters + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public 
SystemParticipantsCopyBuilder wecPlants(Set wecPlants) { + this.wecPlants = wecPlants; + return this; + } + + /** + * Method to alter {@link EmInput} + * + * @param emSystems set of altered energy management systems + * @return this instance of {@link SystemParticipantsCopyBuilder} + */ + public SystemParticipantsCopyBuilder emSystems(Set emSystems) { + this.emSystems = emSystems; + return this; + } + + @Override + public SystemParticipants build() { + return new SystemParticipants( + bmPlants, + chpPlants, + evCS, + evs, + fixedFeedIns, + heatPumps, + loads, + pvPlants, + storages, + wecPlants, + emSystems); + } + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/ThermalGrid.java b/src/main/java/edu/ie3/datamodel/models/input/container/ThermalGrid.java index 87c102997..3b6d277cc 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/ThermalGrid.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/ThermalGrid.java @@ -35,6 +35,11 @@ public List allEntitiesAsList() { return ret; } + @Override + public ThermalGridCopyBuilder copy() { + return new ThermalGridCopyBuilder(this); + } + @Override public String toString() { return "ThermalGrid{" @@ -46,4 +51,66 @@ public String toString() { + storages.size() + '}'; } + + /** + * A builder pattern based approach to create copies of {@link ThermalGrid} containers with + * altered field values. 
For detailed field descriptions refer to java docs of {@link ThermalGrid} + * + * @version 3.1 + * @since 14.02.23 + */ + public static class ThermalGridCopyBuilder implements InputContainerCopyBuilder { + private ThermalBusInput bus; + private Set houses; + private Set storages; + + /** + * Constructor for {@link ThermalGridCopyBuilder} + * + * @param thermalGrid instance of {@link ThermalGrid} + */ + protected ThermalGridCopyBuilder(ThermalGrid thermalGrid) { + this.bus = thermalGrid.bus(); + this.houses = thermalGrid.houses(); + this.storages = thermalGrid.storages(); + } + + /** + * Method to alter {@link ThermalBusInput} + * + * @param bus altered thermal bus + * @return this instance of {@link ThermalGridCopyBuilder} + */ + public ThermalGridCopyBuilder bus(ThermalBusInput bus) { + this.bus = bus; + return this; + } + + /** + * Method to alter {@link ThermalHouseInput} + * + * @param houses altered thermal houses + * @return this instance of {@link ThermalGridCopyBuilder} + */ + public ThermalGridCopyBuilder houses(Set houses) { + this.houses = houses; + return this; + } + + /** + * Method to alter {@link ThermalStorageInput} + * + * @param storages altered thermal storages + * @return this instance of {@link ThermalGridCopyBuilder} + */ + public ThermalGridCopyBuilder storages(Set storages) { + this.storages = storages; + return this; + } + + @Override + public ThermalGrid build() { + return new ThermalGrid(bus, houses, storages); + } + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/ThermalUnits.java b/src/main/java/edu/ie3/datamodel/models/input/container/ThermalUnits.java index 2e2cd0a7c..0a2b84a03 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/ThermalUnits.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/ThermalUnits.java @@ -31,8 +31,64 @@ public List allEntitiesAsList() { return ret; } + @Override + public ThermalUnitsCopyBuilder copy() { + return new ThermalUnitsCopyBuilder(this); + } + 
@Override public String toString() { return "ThermalUnits{" + "#houses=" + houses.size() + ", #storages=" + storages.size() + '}'; } + + /** + * A builder pattern based approach to create copies of {@link ThermalUnits} containers with + * altered field values. For detailed field descriptions refer to java docs of {@link + * ThermalUnits} + * + * @version 3.1 + * @since 14.02.23 + */ + public static class ThermalUnitsCopyBuilder + implements InputContainerCopyBuilder { + private Set houses; + private Set storages; + + /** + * Constructor for {@link ThermalUnitsCopyBuilder} + * + * @param thermalUnits instance of {@link ThermalUnits} + */ + protected ThermalUnitsCopyBuilder(ThermalUnits thermalUnits) { + this.houses = thermalUnits.houses(); + this.storages = thermalUnits.storages(); + } + + /** + * Method to alter {@link ThermalHouseInput} + * + * @param houses altered thermal houses + * @return this instance of {@link ThermalUnitsCopyBuilder} + */ + public ThermalUnitsCopyBuilder houses(Set houses) { + this.houses = houses; + return this; + } + + /** + * Method to alter {@link ThermalStorageInput} + * + * @param storages altered thermal storages + * @return this instance of {@link ThermalUnitsCopyBuilder} + */ + public ThermalUnitsCopyBuilder storages(Set storages) { + this.storages = storages; + return this; + } + + @Override + public ThermalUnits build() { + return new ThermalUnits(houses, storages); + } + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/graphics/GraphicInput.java b/src/main/java/edu/ie3/datamodel/models/input/graphics/GraphicInput.java index a7648c522..cf865732d 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/graphics/GraphicInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/graphics/GraphicInput.java @@ -65,14 +65,16 @@ public String toString() { + '}'; } + public abstract GraphicInputCopyBuilder> copy(); + /** * Abstract class for all builder that build child entities of abstract class {@link GraphicInput} * * 
@version 0.1 * @since 05.06.20 */ - protected abstract static class GraphicInputCopyBuilder> - extends UniqueEntityCopyBuilder { + public abstract static class GraphicInputCopyBuilder> + extends UniqueEntityCopyBuilder { private String graphicLayer; private LineString path; @@ -83,14 +85,14 @@ protected GraphicInputCopyBuilder(GraphicInput entity) { this.path = entity.getPath(); } - public T graphicLayer(String graphicLayer) { + public B graphicLayer(String graphicLayer) { this.graphicLayer = graphicLayer; - return childInstance(); + return thisInstance(); } - public T path(LineString path) { + public B path(LineString path) { this.path = path; - return childInstance(); + return thisInstance(); } protected String getGraphicLayer() { @@ -105,6 +107,6 @@ protected LineString getPath() { public abstract GraphicInput build(); @Override - protected abstract T childInstance(); + protected abstract B thisInstance(); } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/graphics/LineGraphicInput.java b/src/main/java/edu/ie3/datamodel/models/input/graphics/LineGraphicInput.java index a1b289c64..65520abc9 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/graphics/LineGraphicInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/graphics/LineGraphicInput.java @@ -76,7 +76,7 @@ private LineGraphicInputCopyBuilder(LineGraphicInput entity) { } @Override - protected LineGraphicInputCopyBuilder childInstance() { + protected LineGraphicInputCopyBuilder thisInstance() { return this; } diff --git a/src/main/java/edu/ie3/datamodel/models/input/graphics/NodeGraphicInput.java b/src/main/java/edu/ie3/datamodel/models/input/graphics/NodeGraphicInput.java index 85f8594ca..9ad76c8f7 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/graphics/NodeGraphicInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/graphics/NodeGraphicInput.java @@ -105,7 +105,7 @@ public NodeGraphicInputCopyBuilder node(NodeInput node) { } @Override - protected 
NodeGraphicInputCopyBuilder childInstance() { + protected NodeGraphicInputCopyBuilder thisInstance() { return this; } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/BmInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/BmInput.java index 3ef758a59..ec6003abe 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/BmInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/BmInput.java @@ -215,7 +215,7 @@ public BmInput build() { } @Override - protected BmInputCopyBuilder childInstance() { + protected BmInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/ChpInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/ChpInput.java index b1bf2ee53..422817ecb 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/ChpInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/ChpInput.java @@ -216,7 +216,7 @@ public ChpInputCopyBuilder marketReaction(boolean marketReaction) { } @Override - protected ChpInputCopyBuilder childInstance() { + protected ChpInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/EmInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/EmInput.java index 42329dc07..eeac609aa 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/EmInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/EmInput.java @@ -206,7 +206,7 @@ public EmInput build() { } @Override - protected EmInputCopyBuilder childInstance() { + protected EmInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/EvInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/EvInput.java index a483bb756..5f2edeae9 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/EvInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/EvInput.java @@ -139,7 +139,7 @@ public 
EvInputCopyBuilder type(EvTypeInput type) { } @Override - protected EvInputCopyBuilder childInstance() { + protected EvInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/EvcsInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/EvcsInput.java index e08829f16..c8f02ed38 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/EvcsInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/EvcsInput.java @@ -284,7 +284,7 @@ public EvcsInput build() { } @Override - protected EvcsInputCopyBuilder childInstance() { + protected EvcsInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/FixedFeedInInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/FixedFeedInInput.java index f3d8d76d9..16ba19dc7 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/FixedFeedInInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/FixedFeedInInput.java @@ -162,7 +162,7 @@ public FixedFeedInInputCopyBuilder cosPhiRated(double cosPhiRated) { } @Override - protected FixedFeedInInputCopyBuilder childInstance() { + protected FixedFeedInInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/HpInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/HpInput.java index edb52b673..ef5bf7fea 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/HpInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/HpInput.java @@ -164,7 +164,7 @@ public HpInputCopyBuilder thermalBus(ThermalBusInput thermalBus) { } @Override - protected HpInputCopyBuilder childInstance() { + protected HpInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/LoadInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/LoadInput.java index 0b3218441..9f320d851 100644 --- 
a/src/main/java/edu/ie3/datamodel/models/input/system/LoadInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/LoadInput.java @@ -322,7 +322,7 @@ public LoadInput build() { } @Override - protected LoadInputCopyBuilder childInstance() { + protected LoadInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/PvInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/PvInput.java index 382e449ac..49a68253f 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/PvInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/PvInput.java @@ -336,7 +336,7 @@ public PvInput build() { } @Override - protected PvInputCopyBuilder childInstance() { + protected PvInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/StorageInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/StorageInput.java index a019e7924..26726cb03 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/StorageInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/StorageInput.java @@ -139,7 +139,7 @@ public StorageInputCopyBuilder type(StorageTypeInput type) { } @Override - protected StorageInputCopyBuilder childInstance() { + protected StorageInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/SystemParticipantInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/SystemParticipantInput.java index e05167371..9fa4a00a6 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/SystemParticipantInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/SystemParticipantInput.java @@ -116,8 +116,8 @@ public String toString() { * @since 05.06.20 */ public abstract static class SystemParticipantInputCopyBuilder< - T extends SystemParticipantInputCopyBuilder> - extends AssetInputCopyBuilder { + B extends 
SystemParticipantInputCopyBuilder> + extends AssetInputCopyBuilder { private NodeInput node; private ReactivePowerCharacteristic qCharacteristics; @@ -128,14 +128,14 @@ protected SystemParticipantInputCopyBuilder(SystemParticipantInput entity) { this.qCharacteristics = entity.getqCharacteristics(); } - public T node(NodeInput node) { + public B node(NodeInput node) { this.node = node; - return childInstance(); + return thisInstance(); } - public T qCharacteristics(ReactivePowerCharacteristic qCharacteristics) { + public B qCharacteristics(ReactivePowerCharacteristic qCharacteristics) { this.qCharacteristics = qCharacteristics; - return childInstance(); + return thisInstance(); } protected NodeInput getNode() { @@ -150,6 +150,6 @@ protected ReactivePowerCharacteristic getqCharacteristics() { public abstract SystemParticipantInput build(); @Override - protected abstract T childInstance(); + protected abstract B thisInstance(); } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/WecInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/WecInput.java index d645824ff..d5c5122d5 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/WecInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/WecInput.java @@ -163,7 +163,7 @@ public WecInputCopyBuilder marketReaction(boolean marketReaction) { } @Override - protected WecInputCopyBuilder childInstance() { + protected WecInputCopyBuilder thisInstance() { return this; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/thermal/CylindricalStorageInput.java b/src/main/java/edu/ie3/datamodel/models/input/thermal/CylindricalStorageInput.java index 478b8ddf8..672ec754c 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/thermal/CylindricalStorageInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/thermal/CylindricalStorageInput.java @@ -222,7 +222,7 @@ public CylindricalStorageInputCopyBuilder c(ComparableQuantity copy(); } diff --git 
a/src/main/java/edu/ie3/datamodel/models/input/thermal/ThermalUnitInput.java b/src/main/java/edu/ie3/datamodel/models/input/thermal/ThermalUnitInput.java index 1c1c92f6b..ebb911198 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/thermal/ThermalUnitInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/thermal/ThermalUnitInput.java @@ -82,8 +82,8 @@ public String toString() { * ThermalUnitInput} */ protected abstract static class ThermalUnitInputCopyBuilder< - T extends ThermalUnitInput.ThermalUnitInputCopyBuilder> - extends AssetInputCopyBuilder { + B extends ThermalUnitInput.ThermalUnitInputCopyBuilder> + extends AssetInputCopyBuilder { private ThermalBusInput thermalBus; @@ -92,9 +92,9 @@ protected ThermalUnitInputCopyBuilder(ThermalUnitInput entity) { this.thermalBus = entity.getThermalBus(); } - public T thermalBus(ThermalBusInput thermalBus) { + public B thermalBus(ThermalBusInput thermalBus) { this.thermalBus = thermalBus; - return childInstance(); + return thisInstance(); } protected ThermalBusInput getThermalBus() { @@ -105,6 +105,6 @@ protected ThermalBusInput getThermalBus() { public abstract ThermalUnitInput build(); @Override - protected abstract T childInstance(); + protected abstract B thisInstance(); } } diff --git a/src/main/java/edu/ie3/datamodel/models/result/connector/ConnectorResult.java b/src/main/java/edu/ie3/datamodel/models/result/connector/ConnectorResult.java index ee241b0a1..7b8030396 100644 --- a/src/main/java/edu/ie3/datamodel/models/result/connector/ConnectorResult.java +++ b/src/main/java/edu/ie3/datamodel/models/result/connector/ConnectorResult.java @@ -124,7 +124,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(iAMag, iAAng, iBMag, iBAng); + return Objects.hash(super.hashCode(), iAMag, iAAng, iBMag, iBAng); } @Override diff --git a/src/main/java/edu/ie3/datamodel/models/value/CoordinateValue.java b/src/main/java/edu/ie3/datamodel/models/value/CoordinateValue.java new 
file mode 100644 index 000000000..45721efb3 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/models/value/CoordinateValue.java @@ -0,0 +1,18 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.models.value; + +import org.locationtech.jts.geom.Point; + +public class CoordinateValue implements Value { + public final Integer id; + public final Point coordinate; + + public CoordinateValue(int id, Point coordinate) { + this.id = id; + this.coordinate = coordinate; + } +} diff --git a/src/main/java/edu/ie3/datamodel/utils/ContainerNodeUpdateUtil.java b/src/main/java/edu/ie3/datamodel/utils/ContainerNodeUpdateUtil.java index 0baa490de..4778fabea 100644 --- a/src/main/java/edu/ie3/datamodel/utils/ContainerNodeUpdateUtil.java +++ b/src/main/java/edu/ie3/datamodel/utils/ContainerNodeUpdateUtil.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.utils; +import edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.models.input.MeasurementUnitInput; import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.connector.*; @@ -40,7 +41,7 @@ private ContainerNodeUpdateUtil() { * @return a copy of the provided grid with updated nodes as provided */ public static GridContainer updateGridWithNodes( - GridContainer grid, Map oldToNewNodes) { + GridContainer grid, Map oldToNewNodes) throws InvalidGridException { if (grid instanceof JointGridContainer jointGridContainer) { return updateGridWithNodes(jointGridContainer, oldToNewNodes); } else { @@ -66,7 +67,8 @@ public static GridContainer updateGridWithNodes( * @return a copy of the provided grid with updated nodes as provided */ public static JointGridContainer updateGridWithNodes( - JointGridContainer grid, Map oldToNewNodes) { + JointGridContainer grid, Map oldToNewNodes) + throws InvalidGridException { UpdatedEntities 
updatedEntities = updateEntities( grid.getRawGrid(), grid.getSystemParticipants(), grid.getGraphics(), oldToNewNodes); @@ -99,7 +101,7 @@ public static JointGridContainer updateGridWithNodes( * @return a copy of the provided grid with updated nodes as provided */ public static SubGridContainer updateGridWithNodes( - SubGridContainer grid, Map oldToNewNodes) { + SubGridContainer grid, Map oldToNewNodes) throws InvalidGridException { UpdatedEntities updatedEntities = updateEntities( diff --git a/src/main/java/edu/ie3/datamodel/utils/ContainerUtils.java b/src/main/java/edu/ie3/datamodel/utils/ContainerUtils.java index 6470290f1..bf1eb0615 100644 --- a/src/main/java/edu/ie3/datamodel/utils/ContainerUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/ContainerUtils.java @@ -426,7 +426,8 @@ public static GraphicElements filterForSubnet(GraphicElements input, int subnet) * @return The predominant voltage level in this grid * @throws InvalidGridException If not a single, predominant voltage level can be determined */ - public static VoltageLevel determinePredominantVoltLvl(RawGridElements rawGrid, int subnet) { + public static VoltageLevel determinePredominantVoltLvl(RawGridElements rawGrid, int subnet) + throws InvalidGridException { /* Exclude all nodes, that are at the high voltage side of the transformer */ Set gridNodes = new HashSet<>(rawGrid.getNodes()); gridNodes.removeAll( @@ -502,7 +503,8 @@ public static SubGridTopologyGraph buildSubGridTopologyGraph( String gridName, RawGridElements rawGrid, SystemParticipants systemParticipants, - GraphicElements graphics) { + GraphicElements graphics) + throws InvalidGridException { /* Collect the different sub nets. 
Through the validation of lines, it is ensured, that no galvanically connected * grid has more than one subnet number assigned */ SortedSet subnetNumbers = determineSubnetNumbers(rawGrid.getNodes()); @@ -540,7 +542,8 @@ private static HashMap buildSubGridContainers( SortedSet subnetNumbers, RawGridElements rawGrid, SystemParticipants systemParticipants, - GraphicElements graphics) { + GraphicElements graphics) + throws InvalidGridException { HashMap subGrids = new HashMap<>(subnetNumbers.size()); for (int subnetNumber : subnetNumbers) { RawGridElements rawGridElements = ContainerUtils.filterForSubnet(rawGrid, subnetNumber); @@ -564,7 +567,8 @@ private static HashMap buildSubGridContainers( * @return An immutable graph of the sub grid topology */ private static SubGridTopologyGraph buildSubGridTopologyGraph( - Map subGrids, RawGridElements rawGridElements) { + Map subGrids, RawGridElements rawGridElements) + throws InvalidGridException { /* Building a mutable graph, that is boxed as immutable later */ DirectedMultigraph mutableGraph = new DirectedMultigraph<>(SubGridGate.class); @@ -777,7 +781,7 @@ private static LinkedList traverseAlongSwitchChain( * @return A joint model */ public static JointGridContainer combineToJointGrid( - Collection subGridContainers) { + Collection subGridContainers) throws InvalidGridException { if (subGridContainers.stream().map(SubGridContainer::getGridName).distinct().count() > 1) throw new InvalidGridException( "You are trying to combine sub grids of different grid models"); @@ -852,7 +856,8 @@ public static JointGridContainer combineToJointGrid( * @param subGridContainer the subgrid container to be altered * @return a copy of the given {@link SubGridContainer} with transformer nodes marked as slack */ - public static SubGridContainer withTrafoNodeAsSlack(final SubGridContainer subGridContainer) { + public static SubGridContainer withTrafoNodeAsSlack(final SubGridContainer subGridContainer) + throws InvalidGridException { // 
transformer 3w Map oldToNewTrafo3WANodes = new HashMap<>(); diff --git a/src/main/java/edu/ie3/datamodel/utils/ExceptionUtils.java b/src/main/java/edu/ie3/datamodel/utils/ExceptionUtils.java new file mode 100644 index 000000000..018580d20 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/utils/ExceptionUtils.java @@ -0,0 +1,27 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.utils; + +import java.util.List; + +public class ExceptionUtils { + private ExceptionUtils() { + throw new IllegalStateException("Utility classes cannot be instantiated"); + } + + /** + * Creates a string containing multiple exception messages. + * + * @param exceptions list of exceptions + * @return str containing the messages + */ + public static String getMessages(List exceptions) { + return exceptions.stream() + .map(Throwable::getMessage) + .reduce("", (a, b) -> a + "\n " + b) + .replaceFirst("\n ", ""); + } +} diff --git a/src/main/java/edu/ie3/datamodel/utils/FileUtils.java b/src/main/java/edu/ie3/datamodel/utils/FileUtils.java new file mode 100644 index 000000000..845a30105 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/utils/FileUtils.java @@ -0,0 +1,90 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.utils; + +import edu.ie3.datamodel.io.IoUtil; +import java.nio.file.Path; +import java.util.Objects; +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Some utility functionalities. 
*/ +public class FileUtils { + private static final Pattern FILE_NAME_PATTERN = + Pattern.compile( + "^(?[^\\\\/\\s.]{0,255})(?:\\.(?[a-zA-Z0-9]{0,10}(?:\\.[a-zA-Z0-9]{0,10})?))?$"); + private static final String CSV_FILE_EXTENSION = "csv"; + + private static final Logger logger = LoggerFactory.getLogger(FileUtils.class); + + private FileUtils() { + throw new IllegalStateException("Utility classes cannot be instantiated"); + } + + /** + * Method to get a {@link Path} from a filename and an option of a directory path. + * + * @param fileName of the file + * @param directoryPath option for the directory path + * @return a definition of a file + */ + public static Path of(String fileName, Optional directoryPath) { + return directoryPath.map(IoUtil::harmonizeFileSeparator).orElse(Path.of("")).resolve(fileName); + } + + /** + * Method to get a {@link Path} when two {@link Optional}'s are provided. + * + * @param fileName option for a filename + * @param directoryPath option for a directory path + * @return an option for a path + */ + public static Optional of(Optional fileName, Optional directoryPath) { + // do not adapt orElseGet, see https://www.baeldung.com/java-optional-or-else-vs-or-else-get for + // details + return Optional.of( + directoryPath + .map(IoUtil::harmonizeFileSeparator) + .orElseGet(() -> Path.of("")) + .resolve(fileName.orElseGet(() -> ""))); + } + + /** + * Method to get the {@link Path} of a csv file. This method will check whether the filename + * contains a csv extension. Also, this method will harmonize the path of the given directory + * path. + * + * @param fileName of the file + * @param directoryPath path to the directory + * @return a definition of the file + */ + public static Path ofCsv(String fileName, Path directoryPath) { + /* Remove all file separators at the beginning and end of a directory path and ensure harmonized file separator */ + Path dirPath = + Objects.nonNull(directoryPath) ? 
IoUtil.harmonizeFileSeparator(directoryPath) : Path.of(""); + + /* Check the given information of the file name */ + Matcher matcher = FILE_NAME_PATTERN.matcher(fileName); + + if (matcher.matches()) { + String extension = matcher.group("extension"); + if (Objects.nonNull(extension) && !extension.equalsIgnoreCase(CSV_FILE_EXTENSION)) + logger.warn( + "You provided a file name with extension '{}'. It will be overridden to '{}'.", + extension, + CSV_FILE_EXTENSION); + return dirPath.resolve(matcher.group("fileName") + "." + CSV_FILE_EXTENSION); + } else { + throw new IllegalArgumentException( + "The file name '" + + fileName + + "' is no valid file name. It may contain everything, except '/', '\\', '.' and any white space character."); + } + } +} diff --git a/src/main/java/edu/ie3/datamodel/utils/GridAndGeoUtils.java b/src/main/java/edu/ie3/datamodel/utils/GridAndGeoUtils.java index 325257599..ef7da382c 100644 --- a/src/main/java/edu/ie3/datamodel/utils/GridAndGeoUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/GridAndGeoUtils.java @@ -19,19 +19,6 @@ private GridAndGeoUtils() { throw new IllegalStateException("Utility classes cannot be instantiated."); } - /** - * Builds a straight line string between the both nodes - * - * @param a Starting point of the line string - * @param b Ending point of the line string - * @return The equivalent straight line string - * @deprecated Use {@link #buildSafeLineStringBetweenNodes(NodeInput, NodeInput)} instead - */ - @Deprecated(since = "1.1.0", forRemoval = true) - public static LineString buildLineStringBetweenNodes(NodeInput a, NodeInput b) { - return buildSafeLineStringBetweenPoints(a.getGeoPosition(), b.getGeoPosition()); - } - /** * Builds a straight line string between the both nodes that can be compared safely even if the * two provided nodes contain exactly equal coordinates diff --git a/src/main/java/edu/ie3/datamodel/utils/StreamUtils.java b/src/main/java/edu/ie3/datamodel/utils/StreamUtils.java new file mode 
100644 index 000000000..bd7ed9347 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/utils/StreamUtils.java @@ -0,0 +1,72 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.utils; + +import java.util.Iterator; +import java.util.Spliterator; +import java.util.Spliterators; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; + +/** Class containing some stream utils. */ +public class StreamUtils { + private StreamUtils() {} + + /** + * Used to zip a stream with an integer stream. + * + * @param a the stream that should be zipped + * @return a stream of pairs of input stream elements and a corresponding integer value + * @param type of the input stream + */ + public static Stream> zipWithRowIndex(Stream a) { + return zip(a, getIntStream()); + } + + /** + * Used to zip two stream with each other. + * + * @param a first input stream + * @param b second input stream + * @return a stream of pairs of the two input streams + * @param type of the first input stream + * @param type of the second input stream + */ + public static Stream> zip(Stream a, Stream b) { + return StreamSupport.stream( + Spliterators.spliteratorUnknownSize( + zip(a.iterator(), b.iterator()), Spliterator.ORDERED | Spliterator.NONNULL), + false); + } + + /** + * Used to zip to iterators. + * + * @param a first iterator + * @param b second iterator + * @return an iterator of pairs of the two input iterators + * @param type of the first iterator + * @param type of the second iterator + */ + public static Iterator> zip(Iterator a, Iterator b) { + return new Iterator<>() { + public boolean hasNext() { + return a.hasNext() && b.hasNext(); // This uses the shorter of the two `Iterator`s. 
+ } + + public Pair next() { + return new ImmutablePair<>(a.next(), b.next()); + } + }; + } + + /** Returns an infinite integer stream. */ + private static Stream getIntStream() { + return Stream.iterate(1, i -> i + 1); + } +} diff --git a/src/main/java/edu/ie3/datamodel/utils/Try.java b/src/main/java/edu/ie3/datamodel/utils/Try.java new file mode 100644 index 000000000..06a9a5526 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/utils/Try.java @@ -0,0 +1,444 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.utils; + +import static java.util.stream.Collectors.partitioningBy; + +import edu.ie3.datamodel.exceptions.FailureException; +import edu.ie3.datamodel.exceptions.TryException; +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public abstract class Try { + // static utility methods + + /** + * Method to create a {@link Try} object easily. + * + * @param supplier that either returns data or throws an exception + * @param clazz class of the exception + * @return a try object + * @param type of data + * @param type of exception that could be thrown + */ + @SuppressWarnings("unchecked") + public static Try of(TrySupplier supplier, Class clazz) { + try { + return new Success<>(supplier.get()); + } catch (Exception e) { + // this is necessary because we only want to catch exceptions that are of type E + if (e.getClass().isAssignableFrom(clazz)) { + return new Failure<>((E) e); + } else { + throw new TryException("Wrongly caught exception: ", e); + } + } + } + + /** + * Method to create a {@link Try} object easily. 
+ * + * @param supplier that either returns no data or throws an exception + * @param clazz class of the exception + * @return a try object + * @param type of exception that could be thrown + */ + @SuppressWarnings("unchecked") + public static Try ofVoid( + VoidSupplier supplier, Class clazz) { + try { + supplier.get(); + return Success.empty(); + } catch (Exception e) { + // this is necessary because we only want to catch exceptions that are of type E + if (e.getClass().isAssignableFrom(clazz)) { + return Failure.ofVoid((E) e); + } else { + throw new TryException("Wrongly caught exception: ", e); + } + } + } + + /** + * Method to create a {@link Try} object easily. + * + * @param failure a {@link Failure} is returned. + * @param exception exception that should be wrapped by a {@link Failure} + * @return a {@link Try} + * @param type of exception + */ + public static Try ofVoid( + boolean failure, ExceptionSupplier exception) { + if (failure) { + return Failure.ofVoid(exception.get()); + } else { + return Success.empty(); + } + } + + /** + * Utility method to check a list of {@link VoidSupplier}'s. + * + * @param supplier list of {@link VoidSupplier} + * @param clazz class of the exception + * @return a list of {@link Try} + * @param type of the exception + */ + @SafeVarargs + public static List> ofVoid( + Class clazz, VoidSupplier... supplier) { + return Arrays.stream(supplier).map(sup -> Try.ofVoid(sup, clazz)).toList(); + } + + /** + * Method to retrieve the exceptions from all {@link Failure} objects. + * + * @param tries collection of {@link Try} objects + * @return a list of {@link Exception}'s + */ + public static List getExceptions( + Collection> tries) { + return tries.stream().filter(Try::isFailure).map(t -> ((Failure) t).get()).toList(); + } + + /** + * Method to scan a collection of {@link Try} objects for {@link Failure}'s. 
+ * + * @param c collection of {@link Try} objects + * @param typeOfData type of data + * @return a {@link Success} if no {@link Failure}'s are found in the collection + * @param type of data + */ + public static Try, FailureException> scanCollection( + Collection> c, Class typeOfData) { + return scanStream(c.stream(), typeOfData.getSimpleName()) + .transformS(stream -> stream.collect(Collectors.toSet())); + } + + /** + * Method to scan a stream of {@link Try} objects for {@link Failure}'s. + * + * @param stream of {@link Try} objects + * @return a {@link Success} if no {@link Failure}'s are found in the stream + * @param type of data + */ + public static Try, FailureException> scanStream( + Stream> stream, String typeOfData) { + Map>> map = stream.collect(partitioningBy(Try::isSuccess)); + + List> successes = map.get(true); + List> failures = map.get(false); + + // Both lists should exist in map per definition of partitioningBy + assert successes != null && failures != null; + + if (!failures.isEmpty()) { + E first = ((Failure) failures.get(0)).exception; + + return new Failure<>( + new FailureException( + failures.size() + + " exception(s) occurred within \"" + + typeOfData + + "\" data, one is: " + + first, + first.getCause())); + } else { + return new Success<>(successes.stream().map(t -> ((Success) t).data)); + } + } + + // methods of try object + + /** + * Returns true if this object is a {@link Success} or false if this object is a {@link Failure}. + */ + public abstract boolean isSuccess(); + + /** + * Returns true if this object is a {@link Failure} or false if this object is a {@link Success}. + */ + public abstract boolean isFailure(); + + /** + * Method for getting the data. If this object is a {@link Failure} the exception is thrown. + * + * @return data id this object is a {@link Success} + * @throws E if this object is a {@link Failure} + */ + public abstract T getOrThrow() throws E; + + /** Returns an option for data. 
*/ + public abstract Optional getData(); + + /** Returns an option for an exception. */ + public abstract Optional getException(); + + // functional methods + + /** + * Method to transform the data if this object is a {@link Success}. + * + * @param mapper that is used to map the data + * @return a new {@link Try} object + * @param type of the data + */ + public Try map(Function mapper) { + return transformS(mapper); + } + + /** + * Method to transform and flat the data. + * + * @param mapper that is used to map the data + * @return a new {@link Try} object + * @param type of the data + */ + public abstract Try flatMap(Function> mapper); + + /** + * Method to transform a {@link Try} object. This method should be used, if processing the + * exception is not necessary. + * + * @param successFunc that will be used to transform the data + * @return a new {@link Try} object + * @param type of data + */ + public abstract Try transformS(Function successFunc); + + /** + * Method to transform a {@link Try} object. This method should be used, if only exception should + * be processed. + * + * @param failureFunc that will be used to transform the exception + * @return a new {@link Try} object + * @param type of new exception + */ + public abstract Try transformF( + Function failureFunc); + + /** + * Method to transform a {@link Try} object. This method should be used, if processing the + * exception is necessary. + * + * @param successFunc that will be used to transform the data + * @param failureFunc that will be used to transform the exception + * @return a new {@link Try} object + * @param type of data + */ + public abstract Try transform( + Function successFunc, Function failureFunc); + + /** Implementation of {@link Try} class. This class is used to present a successful try. 
*/ + public static final class Success extends Try { + private final T data; + + private static final Success emptySuccess = new Success<>(null); + + public Success(T data) { + this.data = data; + } + + @Override + public boolean isSuccess() { + return true; + } + + @Override + public boolean isFailure() { + return false; + } + + /** Returns true if this object is an empty {@link Success}. */ + public boolean isEmpty() { + return data == null; + } + + @Override + public T getOrThrow() throws E { + return get(); + } + + @Override + public Optional getData() { + return !isEmpty() ? Optional.of(data) : Optional.empty(); + } + + @Override + public Optional getException() { + return Optional.empty(); + } + + @SuppressWarnings("unchecked") + @Override + public Try flatMap(Function> mapper) { + Try, E> t = transformS(mapper); + return t instanceof Success, ?> success ? success.get() : (Try) t; + } + + @Override + public Try transformS(Function successFunc) { + return new Success<>(successFunc.apply(data)); + } + + @Override + public Try transformF( + Function failureFunc) { + return new Success<>(data); + } + + @Override + public Try transform( + Function successFunc, Function failureFunc) { + return new Success<>(successFunc.apply(data)); + } + + /** Returns the stored data. */ + public T get() { + return data; + } + + /** + * Method to create a {@link Success} by applying data. + * + * @param data that should be wrapped by the {@link Success} + * @return a new {@link Success} + * @param type of data + * @param type of exception + */ + public static Success of(D data) { + return new Success<>(data); + } + + /** + * Returns an empty {@link Success}. + * + * @param type of exception + */ + @SuppressWarnings("unchecked") + public static Success empty() { + return (Success) emptySuccess; + } + } + + /** Implementation of {@link Try} class. This class is used to present a failed try. 
*/ + public static final class Failure extends Try { + private final E exception; + + public Failure(E e) { + this.exception = e; + } + + @Override + public boolean isSuccess() { + return false; + } + + @Override + public boolean isFailure() { + return true; + } + + @Override + public T getOrThrow() throws E { + throw exception; + } + + @Override + public Optional getData() { + return Optional.empty(); + } + + @Override + public Optional getException() { + return exception != null ? Optional.of(exception) : Optional.empty(); + } + + @SuppressWarnings("unchecked") + @Override + public Try flatMap(Function> mapper) { + return (Failure) this; + } + + @Override + public Try transformS(Function successFunc) { + return Failure.of(this.exception); + } + + @Override + public Try transformF( + Function failureFunc) { + return Failure.of(failureFunc.apply(exception)); + } + + @Override + public Try transform( + Function successFunc, Function failureFunc) { + return Failure.of(failureFunc.apply(exception)); + } + + /** Returns the thrown exception. */ + public E get() { + return exception; + } + + /** + * Method to create a {@link Failure} object, when a non-empty {@link Success} can be returned. + * + * @param exception that should be saved + * @return a {@link Failure} + * @param type of data + * @param type of exception + */ + public static Failure of(E exception) { + return new Failure<>(exception); + } + + /** + * Method to create a {@link Failure} object, when an empty {@link Success} can be returned. + * + * @param exception that should be saved + * @return a {@link Failure} + * @param type of exception + */ + public static Failure ofVoid(E exception) { + return new Failure<>(exception); + } + } + + /** + * Functional interface for the {@link Try} class. 
+ * + * @param type of data that is supplied + * @param type of exception that could be thrown + */ + @FunctionalInterface + public interface TrySupplier { + T get() throws E; + } + + /** + * Supplier for void methods to {@link Try} class. + * + * @param type of exception that could be thrown + */ + @FunctionalInterface + public interface VoidSupplier { + void get() throws E; + } + + /** + * Supplier for exceptions. + * + * @param type of exception that could be thrown + */ + @FunctionalInterface + public interface ExceptionSupplier { + E get(); + } +} diff --git a/src/main/java/edu/ie3/datamodel/utils/validation/ConnectorValidationUtils.java b/src/main/java/edu/ie3/datamodel/utils/validation/ConnectorValidationUtils.java index bd20a6679..696f4ac9d 100644 --- a/src/main/java/edu/ie3/datamodel/utils/validation/ConnectorValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/validation/ConnectorValidationUtils.java @@ -6,14 +6,26 @@ package edu.ie3.datamodel.utils.validation; import edu.ie3.datamodel.exceptions.InvalidEntityException; +import edu.ie3.datamodel.exceptions.InvalidGridException; +import edu.ie3.datamodel.exceptions.ValidationException; import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.connector.*; import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; +import edu.ie3.datamodel.models.input.container.SubGridContainer; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; import edu.ie3.util.geo.GeoUtils; import edu.ie3.util.quantities.QuantityUtil; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; import javax.measure.Quantity; +import org.jgrapht.Graph; +import org.jgrapht.alg.connectivity.ConnectivityInspector; +import org.jgrapht.graph.DefaultEdge; +import org.jgrapht.graph.SimpleGraph; import 
tech.units.indriya.quantity.Quantities; import tech.units.indriya.unit.Units; @@ -22,7 +34,7 @@ public class ConnectorValidationUtils extends ValidationUtils { // allowed deviation of coordinates in degree for line position check private static final double ALLOWED_COORDINATE_ERROR = 0.000001d; // allowed deviation of length in meters for line length - private static final double ALLOWED_LENGTH_ERROR = 1d; + private static final double ALLOWED_LENGTH_ERROR = 50d; // allowed deviation of voltage in kV for transformer checks private static final double ALLOWED_VOLTAGE_ERROR = 1d; @@ -38,21 +50,36 @@ private ConnectorValidationUtils() { * the checking task, based on the class of the given object. * * @param connector Connector to validate - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing a {@link InvalidEntityException} or an empty + * Success */ - protected static void check(ConnectorInput connector) { - checkNonNull(connector, "a connector"); - connectsDifferentNodes(connector); + protected static List> check(ConnectorInput connector) { + Try isNull = checkNonNull(connector, "a connector"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); + exceptions.add(connectsDifferentNodes(connector)); // Further checks for subclasses - if (LineInput.class.isAssignableFrom(connector.getClass())) checkLine((LineInput) connector); - else if (Transformer2WInput.class.isAssignableFrom(connector.getClass())) - checkTransformer2W((Transformer2WInput) connector); - else if (Transformer3WInput.class.isAssignableFrom(connector.getClass())) - checkTransformer3W((Transformer3WInput) connector); - else if (SwitchInput.class.isAssignableFrom(connector.getClass())) - checkSwitch((SwitchInput) connector); - else throw checkNotImplementedException(connector); + if (LineInput.class.isAssignableFrom(connector.getClass())) { + 
exceptions.addAll(checkLine((LineInput) connector)); + } else if (Transformer2WInput.class.isAssignableFrom(connector.getClass())) { + exceptions.addAll(checkTransformer2W((Transformer2WInput) connector)); + } else if (Transformer3WInput.class.isAssignableFrom(connector.getClass())) { + exceptions.addAll(checkTransformer3W((Transformer3WInput) connector)); + } else if (SwitchInput.class.isAssignableFrom(connector.getClass())) { + exceptions.add(checkSwitch((SwitchInput) connector)); + } else { + exceptions.add( + new Failure<>( + new InvalidEntityException( + "Validation failed due to: ", buildNotImplementedException(connector)))); + } + + return exceptions; } /** @@ -67,14 +94,25 @@ else if (SwitchInput.class.isAssignableFrom(connector.getClass())) * - its coordinates of start and end point equal coordinates of nodes * * @param line Line to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkLine(LineInput line) { - checkLineType(line.getType()); - connectsNodesInDifferentSubnets(line, false); - connectsNodesWithDifferentVoltageLevels(line, false); - detectZeroOrNegativeQuantities(new Quantity[] {line.getLength()}, line); + private static List> checkLine(LineInput line) { + List> exceptions = + new ArrayList<>(checkLineType(line.getType())); + + exceptions.addAll( + Try.ofVoid( + InvalidEntityException.class, + () -> connectsNodesInDifferentSubnets(line, false), + () -> connectsNodesWithDifferentVoltageLevels(line, false), + () -> detectZeroOrNegativeQuantities(new Quantity[] {line.getLength()}, line))); + + /* these two won't throw exceptions and will only log */ coordinatesOfLineEqualCoordinatesOfNodes(line); lineLengthMatchesDistancesBetweenPointsOfLineString(line); + + return exceptions; } /** @@ -88,15 +126,27 @@ private static void checkLine(LineInput line) { * - vRated is greater 0 (Rated voltage) * * @param lineType Line type to validate + * @return a list of 
try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - protected static void checkLineType(LineTypeInput lineType) { - checkNonNull(lineType, "a line type"); - detectNegativeQuantities(new Quantity[] {lineType.getB(), lineType.getG()}, lineType); - detectZeroOrNegativeQuantities( - new Quantity[] { - lineType.getvRated(), lineType.getiMax(), lineType.getX(), lineType.getR() - }, - lineType); + protected static List> checkLineType(LineTypeInput lineType) { + Try isNull = checkNonNull(lineType, "a line type"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + return Try.ofVoid( + InvalidEntityException.class, + () -> + detectNegativeQuantities( + new Quantity[] {lineType.getB(), lineType.getG()}, lineType), + () -> + detectZeroOrNegativeQuantities( + new Quantity[] { + lineType.getvRated(), lineType.getiMax(), lineType.getX(), lineType.getR() + }, + lineType)); } /** @@ -109,13 +159,23 @@ protected static void checkLineType(LineTypeInput lineType) { * - its rated voltages match the voltages at the nodes * * @param transformer2W Transformer2W to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkTransformer2W(Transformer2WInput transformer2W) { - checkTransformer2WType(transformer2W.getType()); - checkIfTapPositionIsWithinBounds(transformer2W); - connectsNodesWithDifferentVoltageLevels(transformer2W, true); - connectsNodesInDifferentSubnets(transformer2W, true); - ratedVoltageOfTransformer2WMatchesVoltagesOfNodes(transformer2W); + private static List> checkTransformer2W( + Transformer2WInput transformer2W) { + List> exceptions = + new ArrayList<>(checkTransformer2WType(transformer2W.getType())); + + exceptions.addAll( + Try.ofVoid( + InvalidEntityException.class, + () -> checkIfTapPositionIsWithinBounds(transformer2W), + () -> connectsNodesWithDifferentVoltageLevels(transformer2W, true), + () -> 
connectsNodesInDifferentSubnets(transformer2W, true), + () -> ratedVoltageOfTransformer2WMatchesVoltagesOfNodes(transformer2W))); + + return exceptions; } /** @@ -134,26 +194,41 @@ private static void checkTransformer2W(Transformer2WInput transformer2W) { * - minimum tap position is smaller than maximum tap position * * @param transformer2WType Transformer2W type to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - protected static void checkTransformer2WType(Transformer2WTypeInput transformer2WType) { - checkNonNull(transformer2WType, "a two winding transformer type"); - detectNegativeQuantities( - new Quantity[] { - transformer2WType.getgM(), transformer2WType.getdPhi(), transformer2WType.getrSc() - }, - transformer2WType); - detectZeroOrNegativeQuantities( - new Quantity[] { - transformer2WType.getsRated(), - transformer2WType.getvRatedA(), - transformer2WType.getvRatedB(), - transformer2WType.getxSc() - }, - transformer2WType); - detectPositiveQuantities(new Quantity[] {transformer2WType.getbM()}, transformer2WType); - checkVoltageMagnitudeChangePerTapPosition(transformer2WType); - checkMinimumTapPositionIsLowerThanMaximumTapPosition(transformer2WType); - checkNeutralTapPositionLiesBetweenMinAndMaxTapPosition(transformer2WType); + protected static List> checkTransformer2WType( + Transformer2WTypeInput transformer2WType) { + Try isNull = + checkNonNull(transformer2WType, "a two winding transformer type"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + return Try.ofVoid( + InvalidEntityException.class, + () -> + detectNegativeQuantities( + new Quantity[] { + transformer2WType.getgM(), transformer2WType.getdPhi(), transformer2WType.getrSc() + }, + transformer2WType), + () -> + detectZeroOrNegativeQuantities( + new Quantity[] { + transformer2WType.getsRated(), + transformer2WType.getvRatedA(), + transformer2WType.getvRatedB(), + transformer2WType.getxSc() + }, + 
transformer2WType), + () -> + detectPositiveQuantities( + new Quantity[] {transformer2WType.getbM()}, transformer2WType), + () -> checkVoltageMagnitudeChangePerTapPosition(transformer2WType), + () -> checkMinimumTapPositionIsLowerThanMaximumTapPosition(transformer2WType), + () -> checkNeutralTapPositionLiesBetweenMinAndMaxTapPosition(transformer2WType)); } /** @@ -166,23 +241,44 @@ protected static void checkTransformer2WType(Transformer2WTypeInput transformer2 * - its rated voltages match the voltages at the nodes * * @param transformer3W Transformer3W to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkTransformer3W(Transformer3WInput transformer3W) { - checkTransformer3WType(transformer3W.getType()); - checkIfTapPositionIsWithinBounds(transformer3W); + private static List> checkTransformer3W( + Transformer3WInput transformer3W) { + List> exceptions = + new ArrayList<>(checkTransformer3WType(transformer3W.getType())); + + exceptions.add( + Try.ofVoid( + () -> checkIfTapPositionIsWithinBounds(transformer3W), InvalidEntityException.class)); + // Check if transformer connects different voltage levels - if (transformer3W.getNodeA().getVoltLvl() == transformer3W.getNodeB().getVoltLvl() - || transformer3W.getNodeA().getVoltLvl() == transformer3W.getNodeC().getVoltLvl() - || transformer3W.getNodeB().getVoltLvl() == transformer3W.getNodeC().getVoltLvl()) - throw new InvalidEntityException( - "Transformer connects nodes of the same voltage level", transformer3W); + exceptions.add( + Try.ofVoid( + transformer3W.getNodeA().getVoltLvl() == transformer3W.getNodeB().getVoltLvl() + || transformer3W.getNodeA().getVoltLvl() == transformer3W.getNodeC().getVoltLvl() + || transformer3W.getNodeB().getVoltLvl() == transformer3W.getNodeC().getVoltLvl(), + () -> + new InvalidEntityException( + "Transformer connects nodes of the same voltage level", transformer3W))); + // Check if 
transformer connects different subnets - if (transformer3W.getNodeA().getSubnet() == transformer3W.getNodeB().getSubnet() - || transformer3W.getNodeA().getSubnet() == transformer3W.getNodeC().getSubnet() - || transformer3W.getNodeB().getSubnet() == transformer3W.getNodeC().getSubnet()) - throw new InvalidEntityException( - "Transformer connects nodes in the same subnet", transformer3W); - ratedVoltageOfTransformer3WMatchesVoltagesOfNodes(transformer3W); + exceptions.add( + Try.ofVoid( + transformer3W.getNodeA().getSubnet() == transformer3W.getNodeB().getSubnet() + || transformer3W.getNodeA().getSubnet() == transformer3W.getNodeC().getSubnet() + || transformer3W.getNodeB().getSubnet() == transformer3W.getNodeC().getSubnet(), + () -> + new InvalidEntityException( + "Transformer connects nodes in the same subnet", transformer3W))); + + exceptions.add( + Try.ofVoid( + () -> ratedVoltageOfTransformer3WMatchesVoltagesOfNodes(transformer3W), + InvalidEntityException.class)); + + return exceptions; } /** @@ -200,26 +296,47 @@ private static void checkTransformer3W(Transformer3WInput transformer3W) { * - minimum tap position is smaller than maximum tap position
* * @param transformer3WType Transformer type to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - protected static void checkTransformer3WType(Transformer3WTypeInput transformer3WType) { - checkNonNull(transformer3WType, "a three winding transformer type"); - detectNegativeQuantities( - new Quantity[] {transformer3WType.getgM(), transformer3WType.getdPhi()}, - transformer3WType); - detectZeroOrNegativeQuantities( - new Quantity[] { - transformer3WType.getsRatedA(), transformer3WType.getsRatedB(), - transformer3WType.getsRatedC(), - transformer3WType.getvRatedA(), transformer3WType.getvRatedB(), - transformer3WType.getvRatedC(), - transformer3WType.getrScA(), transformer3WType.getrScB(), transformer3WType.getrScC(), - transformer3WType.getxScA(), transformer3WType.getxScB(), transformer3WType.getxScC() - }, - transformer3WType); - detectPositiveQuantities(new Quantity[] {transformer3WType.getbM()}, transformer3WType); - checkVoltageMagnitudeChangePerTapPosition(transformer3WType); - checkMinimumTapPositionIsLowerThanMaximumTapPosition(transformer3WType); - checkNeutralTapPositionLiesBetweenMinAndMaxTapPosition(transformer3WType); + protected static List> checkTransformer3WType( + Transformer3WTypeInput transformer3WType) { + Try isNull = + checkNonNull(transformer3WType, "a three winding transformer type"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + return Try.ofVoid( + InvalidEntityException.class, + () -> + detectNegativeQuantities( + new Quantity[] {transformer3WType.getgM(), transformer3WType.getdPhi()}, + transformer3WType), + () -> + detectZeroOrNegativeQuantities( + new Quantity[] { + transformer3WType.getsRatedA(), + transformer3WType.getsRatedB(), + transformer3WType.getsRatedC(), + transformer3WType.getvRatedA(), + transformer3WType.getvRatedB(), + transformer3WType.getvRatedC(), + transformer3WType.getrScA(), + transformer3WType.getrScB(), + 
transformer3WType.getrScC(), + transformer3WType.getxScA(), + transformer3WType.getxScB(), + transformer3WType.getxScC() + }, + transformer3WType), + () -> + detectPositiveQuantities( + new Quantity[] {transformer3WType.getbM()}, transformer3WType), + () -> checkVoltageMagnitudeChangePerTapPosition(transformer3WType), + () -> checkMinimumTapPositionIsLowerThanMaximumTapPosition(transformer3WType), + () -> checkNeutralTapPositionLiesBetweenMinAndMaxTapPosition(transformer3WType)); } /** @@ -227,26 +344,80 @@ protected static void checkTransformer3WType(Transformer3WTypeInput transformer3 * - its connected nodes are in the same voltage level * * @param switchInput Switch to validate + * @return a try object either containing an {@link InvalidEntityException} or an empty Success */ - private static void checkSwitch(SwitchInput switchInput) { - if (!switchInput.getNodeA().getVoltLvl().equals(switchInput.getNodeB().getVoltLvl())) - throw new InvalidEntityException("Switch connects two different voltage levels", switchInput); + private static Try checkSwitch(SwitchInput switchInput) { + return Try.ofVoid( + !switchInput.getNodeA().getVoltLvl().equals(switchInput.getNodeB().getVoltLvl()), + () -> + new InvalidEntityException( + "Switch connects two different voltage levels", switchInput)); /* Remark: Connecting two different "subnets" is fine, because as of our definition regarding a switchgear in * "upstream" direction of a transformer, all the nodes, that hare within the switch chain, belong to the lower * grid, whilst the "real" upper node is within the upper grid */ } + /** + * Check if all given elements are connected. 
+ * + * @param subGridContainer the subgrid to check the connectivity for + * @return a try object either containing an {@link InvalidGridException} or an empty Success + */ + protected static Try checkConnectivity( + SubGridContainer subGridContainer) { + Graph graph = new SimpleGraph<>(DefaultEdge.class); + + subGridContainer.getRawGrid().getNodes().forEach(node -> graph.addVertex(node.getUuid())); + subGridContainer + .getRawGrid() + .getLines() + .forEach(line -> graph.addEdge(line.getNodeA().getUuid(), line.getNodeB().getUuid())); + subGridContainer + .getRawGrid() + .getTransformer2Ws() + .forEach( + trafo2w -> graph.addEdge(trafo2w.getNodeA().getUuid(), trafo2w.getNodeB().getUuid())); + subGridContainer + .getRawGrid() + .getTransformer3Ws() + .forEach( + trafor3w -> { + graph.addEdge(trafor3w.getNodeA().getUuid(), trafor3w.getNodeInternal().getUuid()); + graph.addEdge(trafor3w.getNodeInternal().getUuid(), trafor3w.getNodeB().getUuid()); + graph.addEdge(trafor3w.getNodeInternal().getUuid(), trafor3w.getNodeC().getUuid()); + }); + subGridContainer + .getRawGrid() + .getSwitches() + .forEach( + switches -> + graph.addEdge(switches.getNodeA().getUuid(), switches.getNodeB().getUuid())); + + ConnectivityInspector inspector = new ConnectivityInspector<>(graph); + + return Try.ofVoid( + !inspector.isConnected(), + () -> + new InvalidGridException( + "The grid with subnetNo " + + subGridContainer.getSubnet() + + " is not connected! 
Please ensure that all elements are connected correctly!")); + } + /** * Check that a connector connects different nodes * * @param connectorInput connectorInput to validate */ - private static void connectsDifferentNodes(ConnectorInput connectorInput) { - if (connectorInput.getNodeA() == connectorInput.getNodeB()) { - throw new InvalidEntityException( - connectorInput.getClass().getSimpleName() + " connects the same node, but shouldn't", - connectorInput); - } + private static Try connectsDifferentNodes( + ConnectorInput connectorInput) { + return Try.ofVoid( + connectorInput.getNodeA().equals(connectorInput.getNodeB()), + () -> + new InvalidEntityException( + connectorInput.getClass().getSimpleName() + + " connects the same node, but shouldn't", + connectorInput)); } /** @@ -255,7 +426,8 @@ private static void connectsDifferentNodes(ConnectorInput connectorInput) { * @param connectorInput ConnectorInput to validate * @param yes determines if subnets should be equal or not */ - private static void connectsNodesInDifferentSubnets(ConnectorInput connectorInput, boolean yes) { + private static void connectsNodesInDifferentSubnets(ConnectorInput connectorInput, boolean yes) + throws InvalidEntityException { if (yes) { if (connectorInput.getNodeA().getSubnet() == connectorInput.getNodeB().getSubnet()) { throw new InvalidEntityException( @@ -280,7 +452,7 @@ private static void connectsNodesInDifferentSubnets(ConnectorInput connectorInpu * @param yes determines if voltage levels should be equal or not */ private static void connectsNodesWithDifferentVoltageLevels( - ConnectorInput connectorInput, boolean yes) { + ConnectorInput connectorInput, boolean yes) throws InvalidEntityException { if (yes) { if (connectorInput.getNodeA().getVoltLvl().equals(connectorInput.getNodeB().getVoltLvl())) { throw new InvalidEntityException( @@ -311,16 +483,18 @@ private static void coordinatesOfLineEqualCoordinatesOfNodes(LineInput line) { || line.getGeoPosition() .getEndPoint() 
.isWithinDistance(line.getNodeA().getGeoPosition(), ALLOWED_COORDINATE_ERROR))) - throw new InvalidEntityException( - "Coordinates of start and end point do not match coordinates of connected nodes", line); + logger.warn( + "Coordinates of start and end point do not match coordinates of connected nodes: {}", + line); if (!(line.getGeoPosition() .getStartPoint() .isWithinDistance(line.getNodeB().getGeoPosition(), ALLOWED_COORDINATE_ERROR) || line.getGeoPosition() .getEndPoint() .isWithinDistance(line.getNodeB().getGeoPosition(), ALLOWED_COORDINATE_ERROR))) - throw new InvalidEntityException( - "Coordinates of start and end point do not match coordinates of connected nodes", line); + logger.warn( + "Coordinates of start and end point do not match coordinates of connected nodes: {}", + line); } /** @@ -332,10 +506,13 @@ private static void lineLengthMatchesDistancesBetweenPointsOfLineString(LineInpu // only if not geo positions of both nodes are dummy values if ((line.getNodeA().getGeoPosition() != NodeInput.DEFAULT_GEO_POSITION || line.getNodeB().getGeoPosition() != NodeInput.DEFAULT_GEO_POSITION) - && !QuantityUtil.isEquivalentAbs( - line.getLength(), GeoUtils.calcHaversine(line.getGeoPosition()), ALLOWED_LENGTH_ERROR)) - throw new InvalidEntityException( - "Line length does not equal calculated distances between points building the line", line); + && line.getLength() + .isGreaterThan( + GeoUtils.calcHaversine(line.getGeoPosition()).multiply(ALLOWED_LENGTH_ERROR))) { + logger.warn( + "Line length is more than {}% greater than the calculated distances between points building the line: {}", + ALLOWED_LENGTH_ERROR, line); + } } /** @@ -343,7 +520,8 @@ private static void lineLengthMatchesDistancesBetweenPointsOfLineString(LineInpu * * @param transformer2W Transformer2WInput to validate */ - private static void checkIfTapPositionIsWithinBounds(Transformer2WInput transformer2W) { + private static void checkIfTapPositionIsWithinBounds(Transformer2WInput transformer2W) + 
throws InvalidEntityException { if (transformer2W.getTapPos() < transformer2W.getType().getTapMin() || transformer2W.getTapPos() > transformer2W.getType().getTapMax()) throw new InvalidEntityException( @@ -356,7 +534,8 @@ private static void checkIfTapPositionIsWithinBounds(Transformer2WInput transfor * * @param transformer3W Transformer3WInput to validate */ - private static void checkIfTapPositionIsWithinBounds(Transformer3WInput transformer3W) { + private static void checkIfTapPositionIsWithinBounds(Transformer3WInput transformer3W) + throws InvalidEntityException { if (transformer3W.getTapPos() < transformer3W.getType().getTapMin() || transformer3W.getTapPos() > transformer3W.getType().getTapMax()) throw new InvalidEntityException( @@ -370,7 +549,7 @@ private static void checkIfTapPositionIsWithinBounds(Transformer3WInput transfor * @param transformer2W Transformer2WInput to validate */ private static void ratedVoltageOfTransformer2WMatchesVoltagesOfNodes( - Transformer2WInput transformer2W) { + Transformer2WInput transformer2W) throws InvalidEntityException { if (!QuantityUtil.isEquivalentAbs( transformer2W.getType().getvRatedA(), transformer2W.getNodeA().getVoltLvl().getNominalVoltage(), @@ -392,7 +571,7 @@ private static void ratedVoltageOfTransformer2WMatchesVoltagesOfNodes( * @param transformer3W Transformer3WInput to validate */ private static void ratedVoltageOfTransformer3WMatchesVoltagesOfNodes( - Transformer3WInput transformer3W) { + Transformer3WInput transformer3W) throws InvalidEntityException { if (!QuantityUtil.isEquivalentAbs( transformer3W.getType().getvRatedA(), transformer3W.getNodeA().getVoltLvl().getNominalVoltage(), @@ -419,7 +598,7 @@ private static void ratedVoltageOfTransformer3WMatchesVoltagesOfNodes( * @param transformer2WType Transformer2WTypeInput to validate */ private static void checkVoltageMagnitudeChangePerTapPosition( - Transformer2WTypeInput transformer2WType) { + Transformer2WTypeInput transformer2WType) throws 
InvalidEntityException { if (transformer2WType.getdV().isLessThan(Quantities.getQuantity(0d, Units.PERCENT)) || transformer2WType.getdV().isGreaterThan(Quantities.getQuantity(100d, Units.PERCENT))) throw new InvalidEntityException( @@ -434,7 +613,7 @@ private static void checkVoltageMagnitudeChangePerTapPosition( * @param transformer3WType Transformer3WTypeInput to validate */ private static void checkVoltageMagnitudeChangePerTapPosition( - Transformer3WTypeInput transformer3WType) { + Transformer3WTypeInput transformer3WType) throws InvalidEntityException { if (transformer3WType.getdV().isLessThan(Quantities.getQuantity(0d, Units.PERCENT)) || transformer3WType.getdV().isGreaterThan(Quantities.getQuantity(100d, Units.PERCENT))) throw new InvalidEntityException( @@ -448,7 +627,7 @@ private static void checkVoltageMagnitudeChangePerTapPosition( * @param transformer2WType Transformer2WTypeInput to validate */ private static void checkMinimumTapPositionIsLowerThanMaximumTapPosition( - Transformer2WTypeInput transformer2WType) { + Transformer2WTypeInput transformer2WType) throws InvalidEntityException { if (transformer2WType.getTapMax() < transformer2WType.getTapMin()) throw new InvalidEntityException( "Minimum tap position must be lower than maximum tap position", transformer2WType); @@ -460,7 +639,7 @@ private static void checkMinimumTapPositionIsLowerThanMaximumTapPosition( * @param transformer3WType Transformer3WTypeInput to validate */ private static void checkMinimumTapPositionIsLowerThanMaximumTapPosition( - Transformer3WTypeInput transformer3WType) { + Transformer3WTypeInput transformer3WType) throws InvalidEntityException { if (transformer3WType.getTapMax() < transformer3WType.getTapMin()) throw new InvalidEntityException( "Minimum tap position must be lower than maximum tap position", transformer3WType); @@ -472,7 +651,7 @@ private static void checkMinimumTapPositionIsLowerThanMaximumTapPosition( * @param transformer2WType Transformer3WTypeInput to validate */ 
private static void checkNeutralTapPositionLiesBetweenMinAndMaxTapPosition( - Transformer2WTypeInput transformer2WType) { + Transformer2WTypeInput transformer2WType) throws InvalidEntityException { if (transformer2WType.getTapNeutr() < transformer2WType.getTapMin() || transformer2WType.getTapNeutr() > transformer2WType.getTapMax()) throw new InvalidEntityException( @@ -486,7 +665,7 @@ private static void checkNeutralTapPositionLiesBetweenMinAndMaxTapPosition( * @param transformer3WType Transformer3WTypeInput to validate */ private static void checkNeutralTapPositionLiesBetweenMinAndMaxTapPosition( - Transformer3WTypeInput transformer3WType) { + Transformer3WTypeInput transformer3WType) throws InvalidEntityException { if (transformer3WType.getTapNeutr() < transformer3WType.getTapMin() || transformer3WType.getTapNeutr() > transformer3WType.getTapMax()) throw new InvalidEntityException( diff --git a/src/main/java/edu/ie3/datamodel/utils/validation/GraphicValidationUtils.java b/src/main/java/edu/ie3/datamodel/utils/validation/GraphicValidationUtils.java index d97615b5a..e47eadf61 100644 --- a/src/main/java/edu/ie3/datamodel/utils/validation/GraphicValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/validation/GraphicValidationUtils.java @@ -9,6 +9,9 @@ import edu.ie3.datamodel.models.input.graphics.GraphicInput; import edu.ie3.datamodel.models.input.graphics.LineGraphicInput; import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput; +import edu.ie3.datamodel.utils.Try; +import java.util.ArrayList; +import java.util.List; public class GraphicValidationUtils extends ValidationUtils { @@ -26,19 +29,33 @@ private GraphicValidationUtils() { * fulfill the checking task, based on the class of the given object. 
* * @param graphicInput GraphicInput to validate - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - protected static void check(GraphicInput graphicInput) { - checkNonNull(graphicInput, "a graphic input"); - if (graphicInput.getGraphicLayer() == null) - throw new InvalidEntityException( - "Graphic Layer of graphic element is not defined", graphicInput); + protected static List> check(GraphicInput graphicInput) { + Try isNull = checkNonNull(graphicInput, "a graphic input"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); + + exceptions.add( + Try.ofVoid( + graphicInput.getGraphicLayer() == null, + () -> + new InvalidEntityException( + "Graphic Layer of graphic element is not defined", graphicInput))); // Further checks for subclasses - if (LineGraphicInput.class.isAssignableFrom(graphicInput.getClass())) - checkLineGraphicInput((LineGraphicInput) graphicInput); - if (NodeGraphicInput.class.isAssignableFrom(graphicInput.getClass())) - checkNodeGraphicInput((NodeGraphicInput) graphicInput); + if (LineGraphicInput.class.isAssignableFrom(graphicInput.getClass())) { + exceptions.add(checkLineGraphicInput((LineGraphicInput) graphicInput)); + } else if (NodeGraphicInput.class.isAssignableFrom(graphicInput.getClass())) { + exceptions.add(checkNodeGraphicInput((NodeGraphicInput) graphicInput)); + } + + return exceptions; } /** @@ -47,10 +64,13 @@ protected static void check(GraphicInput graphicInput) { * * @param lineGraphicInput LineGraphicInput to validate */ - private static void checkLineGraphicInput(LineGraphicInput lineGraphicInput) { - if (lineGraphicInput.getPath() == null) - throw new InvalidEntityException( - "Path of line graphic element is not defined", lineGraphicInput); + private static Try checkLineGraphicInput( + LineGraphicInput lineGraphicInput) 
{ + return Try.ofVoid( + lineGraphicInput.getPath() == null, + () -> + new InvalidEntityException( + "Path of line graphic element is not defined", lineGraphicInput)); } /** @@ -60,8 +80,10 @@ private static void checkLineGraphicInput(LineGraphicInput lineGraphicInput) { * * @param nodeGraphicInput NodeGraphicInput to validate */ - private static void checkNodeGraphicInput(NodeGraphicInput nodeGraphicInput) { - if (nodeGraphicInput.getPoint() == null) - throw new InvalidEntityException("Point of node graphic is not defined", nodeGraphicInput); + private static Try checkNodeGraphicInput( + NodeGraphicInput nodeGraphicInput) { + return Try.ofVoid( + nodeGraphicInput.getPoint() == null, + () -> new InvalidEntityException("Point of node graphic is not defined", nodeGraphicInput)); } } diff --git a/src/main/java/edu/ie3/datamodel/utils/validation/GridContainerValidationUtils.java b/src/main/java/edu/ie3/datamodel/utils/validation/GridContainerValidationUtils.java index a25141711..9f2447d0b 100644 --- a/src/main/java/edu/ie3/datamodel/utils/validation/GridContainerValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/validation/GridContainerValidationUtils.java @@ -7,6 +7,8 @@ import edu.ie3.datamodel.exceptions.InvalidEntityException; import edu.ie3.datamodel.exceptions.InvalidGridException; +import edu.ie3.datamodel.exceptions.UnsafeEntityException; +import edu.ie3.datamodel.exceptions.ValidationException; import edu.ie3.datamodel.models.input.AssetInput; import edu.ie3.datamodel.models.input.MeasurementUnitInput; import edu.ie3.datamodel.models.input.NodeInput; @@ -14,8 +16,11 @@ import edu.ie3.datamodel.models.input.connector.LineInput; import edu.ie3.datamodel.models.input.connector.Transformer3WInput; import edu.ie3.datamodel.models.input.container.*; +import edu.ie3.datamodel.models.input.graphics.GraphicInput; import edu.ie3.datamodel.models.input.system.SystemParticipantInput; import edu.ie3.datamodel.utils.ContainerUtils; +import 
edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; import java.util.*; import java.util.stream.Stream; @@ -38,25 +43,45 @@ private GridContainerValidationUtils() { * Checks a complete grid data container * * @param gridContainer Grid model to check + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success */ - protected static void check(GridContainer gridContainer) { - checkNonNull(gridContainer, "grid container"); + protected static List> check( + GridContainer gridContainer) { + Try isNull = checkNonNull(gridContainer, "grid container"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); /* sanity check to ensure distinct UUIDs */ Optional exceptionString = checkForDuplicateUuids(new HashSet<>(gridContainer.allEntitiesAsList())); - if (exceptionString.isPresent()) { - throw new InvalidGridException( - duplicateUuidsString(gridContainer.getClass().getSimpleName(), exceptionString)); + exceptions.add( + Try.ofVoid( + exceptionString.isPresent(), + () -> + new InvalidGridException( + duplicateUuidsString( + gridContainer.getClass().getSimpleName(), exceptionString)))); + + exceptions.addAll(checkRawGridElements(gridContainer.getRawGrid())); + exceptions.addAll( + checkSystemParticipants( + gridContainer.getSystemParticipants(), gridContainer.getRawGrid().getNodes())); + exceptions.addAll( + checkGraphicElements( + gridContainer.getGraphics(), + gridContainer.getRawGrid().getNodes(), + gridContainer.getRawGrid().getLines())); + + if (gridContainer instanceof SubGridContainer subGridContainer) { + exceptions.add(ConnectorValidationUtils.checkConnectivity(subGridContainer)); } - checkRawGridElements(gridContainer.getRawGrid()); - checkSystemParticipants( - gridContainer.getSystemParticipants(), gridContainer.getRawGrid().getNodes()); - checkGraphicElements( - gridContainer.getGraphics(), - gridContainer.getRawGrid().getNodes(), - 
gridContainer.getRawGrid().getLines()); + return exceptions; } /** @@ -64,18 +89,29 @@ protected static void check(GridContainer gridContainer) { * as the fact, that none of the assets is connected to a node, that is not in the set of nodes. * * @param rawGridElements Raw grid elements - * @throws InvalidGridException If something is wrong + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success */ - protected static void checkRawGridElements(RawGridElements rawGridElements) { - checkNonNull(rawGridElements, "raw grid elements"); + protected static List> checkRawGridElements( + RawGridElements rawGridElements) { + Try isNull = checkNonNull(rawGridElements, "raw grid elements"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); /* sanity check to ensure distinct UUIDs */ Optional exceptionString = checkForDuplicateUuids(new HashSet<>(rawGridElements.allEntitiesAsList())); - if (exceptionString.isPresent()) { - throw new InvalidGridException( - duplicateUuidsString(rawGridElements.getClass().getSimpleName(), exceptionString)); - } + exceptions.add( + Try.ofVoid( + exceptionString.isPresent(), + () -> + new InvalidGridException( + duplicateUuidsString( + rawGridElements.getClass().getSimpleName(), exceptionString)))); /* Checking nodes */ Set nodes = rawGridElements.getNodes(); @@ -86,8 +122,8 @@ protected static void checkRawGridElements(RawGridElements rawGridElements) { .getLines() .forEach( line -> { - checkNodeAvailability(line, nodes); - ConnectorValidationUtils.check(line); + exceptions.add(checkNodeAvailability(line, nodes)); + exceptions.addAll(ConnectorValidationUtils.check(line)); }); /* Checking two winding transformers */ @@ -95,8 +131,8 @@ protected static void checkRawGridElements(RawGridElements rawGridElements) { .getTransformer2Ws() .forEach( transformer -> { - checkNodeAvailability(transformer, nodes); - ConnectorValidationUtils.check(transformer); 
+ exceptions.add(checkNodeAvailability(transformer, nodes)); + exceptions.addAll(ConnectorValidationUtils.check(transformer)); }); /* Checking three winding transformers */ @@ -104,8 +140,8 @@ protected static void checkRawGridElements(RawGridElements rawGridElements) { .getTransformer3Ws() .forEach( transformer -> { - checkNodeAvailability(transformer, nodes); - ConnectorValidationUtils.check(transformer); + exceptions.add(checkNodeAvailability(transformer, nodes)); + exceptions.addAll(ConnectorValidationUtils.check(transformer)); }); /* Checking switches @@ -127,8 +163,8 @@ protected static void checkRawGridElements(RawGridElements rawGridElements) { .getSwitches() .forEach( switcher -> { - checkNodeAvailability(switcher, validSwitchNodes); - ConnectorValidationUtils.check(switcher); + exceptions.add(checkNodeAvailability(switcher, validSwitchNodes)); + exceptions.addAll(ConnectorValidationUtils.check(switcher)); }); /* Checking measurement units */ @@ -136,9 +172,33 @@ protected static void checkRawGridElements(RawGridElements rawGridElements) { .getMeasurementUnits() .forEach( measurement -> { - checkNodeAvailability(measurement, nodes); - MeasurementUnitValidationUtils.check(measurement); + exceptions.add(checkNodeAvailability(measurement, nodes)); + exceptions.add(MeasurementUnitValidationUtils.check(measurement)); }); + + exceptions.addAll(checkRawGridTypeIds(rawGridElements)); + + return exceptions; + } + + /** + * Checks the validity of type ids of every entity. 
+ * + * @param rawGridElements the raw grid elements + * @return a list of try objects either containing an {@link UnsafeEntityException} or an empty + * Success + */ + protected static List> checkRawGridTypeIds( + RawGridElements rawGridElements) { + List> exceptions = new ArrayList<>(); + exceptions.addAll(ValidationUtils.checkIds(rawGridElements.getNodes())); + exceptions.addAll(ValidationUtils.checkIds(rawGridElements.getLines())); + exceptions.addAll(ValidationUtils.checkIds(rawGridElements.getTransformer2Ws())); + exceptions.addAll(ValidationUtils.checkIds(rawGridElements.getTransformer3Ws())); + exceptions.addAll(ValidationUtils.checkIds(rawGridElements.getSwitches())); + exceptions.addAll(ValidationUtils.checkIds(rawGridElements.getMeasurementUnits())); + + return exceptions; } /** @@ -147,86 +207,94 @@ protected static void checkRawGridElements(RawGridElements rawGridElements) { * * @param systemParticipants The system participants * @param nodes Set of already known nodes + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success */ - protected static void checkSystemParticipants( + protected static List> checkSystemParticipants( SystemParticipants systemParticipants, Set nodes) { - checkNonNull(systemParticipants, "system participants"); + Try isNull = + checkNonNull(systemParticipants, "system participants"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); // sanity check for distinct uuids Optional exceptionString = ValidationUtils.checkForDuplicateUuids( new HashSet<>(systemParticipants.allEntitiesAsList())); - if (exceptionString.isPresent()) { - throw new InvalidGridException( - duplicateUuidsString(systemParticipants.getClass().getSimpleName(), exceptionString)); - } - - systemParticipants - .getBmPlants() - .forEach( - entity -> { - checkNodeAvailability(entity, nodes); - SystemParticipantValidationUtils.check(entity); - }); - - systemParticipants 
- .getChpPlants() - .forEach( - entity -> { - checkNodeAvailability(entity, nodes); - SystemParticipantValidationUtils.check(entity); - }); - - /* TODO: Electric vehicle charging systems are currently only dummy implementation. if this has changed, the whole - * method can be aggregated */ - - systemParticipants - .getFixedFeedIns() - .forEach( - entity -> { - checkNodeAvailability(entity, nodes); - SystemParticipantValidationUtils.check(entity); - }); - - systemParticipants - .getHeatPumps() - .forEach( - entity -> { - checkNodeAvailability(entity, nodes); - SystemParticipantValidationUtils.check(entity); - }); + exceptions.add( + Try.ofVoid( + exceptionString.isPresent(), + () -> + new InvalidGridException( + duplicateUuidsString( + systemParticipants.getClass().getSimpleName(), exceptionString)))); + + exceptions.addAll(checkSystemParticipants(systemParticipants.getBmPlants(), nodes)); + exceptions.addAll(checkSystemParticipants(systemParticipants.getChpPlants(), nodes)); + exceptions.addAll(checkSystemParticipants(systemParticipants.getEvCS(), nodes)); + exceptions.addAll(checkSystemParticipants(systemParticipants.getFixedFeedIns(), nodes)); + exceptions.addAll(checkSystemParticipants(systemParticipants.getHeatPumps(), nodes)); + exceptions.addAll(checkSystemParticipants(systemParticipants.getLoads(), nodes)); + exceptions.addAll(checkSystemParticipants(systemParticipants.getPvPlants(), nodes)); + exceptions.addAll(checkSystemParticipants(systemParticipants.getStorages(), nodes)); + exceptions.addAll(checkSystemParticipants(systemParticipants.getWecPlants(), nodes)); + exceptions.addAll(checkSystemParticipantsTypeIds(systemParticipants)); + + return exceptions; + } - systemParticipants - .getLoads() - .forEach( + /** + * Checks the validity of specific system participant. 
Moreover, it checks, if the systems are + * connected to a node that is not in the provided set + * + * @param participants a set of specific system participants + * @param nodes Set of already known nodes + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success + */ + protected static List> checkSystemParticipants( + Set participants, Set nodes) { + return participants.stream() + .map( entity -> { - checkNodeAvailability(entity, nodes); - SystemParticipantValidationUtils.check(entity); - }); + List> exceptions = new ArrayList<>(); - systemParticipants - .getPvPlants() - .forEach( - entity -> { - checkNodeAvailability(entity, nodes); - SystemParticipantValidationUtils.check(entity); - }); + exceptions.add(checkNodeAvailability(entity, nodes)); + exceptions.addAll(SystemParticipantValidationUtils.check(entity)); - systemParticipants - .getStorages() - .forEach( - entity -> { - checkNodeAvailability(entity, nodes); - SystemParticipantValidationUtils.check(entity); - }); + return exceptions; + }) + .flatMap(List::stream) + .toList(); + } - systemParticipants - .getWecPlants() - .forEach( - entity -> { - checkNodeAvailability(entity, nodes); - SystemParticipantValidationUtils.check(entity); - }); + /** + * Checks the validity of type ids of every entity. 
+ * + * @param systemParticipants the system participants + * @return a list of try objects either containing an {@link UnsafeEntityException} or an empty + * Success + */ + protected static List> checkSystemParticipantsTypeIds( + SystemParticipants systemParticipants) { + List> exceptions = new ArrayList<>(); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getBmPlants())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getChpPlants())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getEvCS())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getEvs())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getFixedFeedIns())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getHeatPumps())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getLoads())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getPvPlants())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getStorages())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getWecPlants())); + exceptions.addAll(ValidationUtils.checkIds(systemParticipants.getEmSystems())); + + return exceptions; } /** @@ -235,112 +303,121 @@ protected static void checkSystemParticipants( * @param graphicElements Elements to check * @param nodes Already known and checked nodes * @param lines Already known and checked lines + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success */ - protected static void checkGraphicElements( + protected static List> checkGraphicElements( GraphicElements graphicElements, Set nodes, Set lines) { - checkNonNull(graphicElements, "graphic elements"); + Try isNull = checkNonNull(graphicElements, "graphic elements"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); // sanity check for distinct uuids Optional exceptionString = 
checkForDuplicateUuids(new HashSet<>(graphicElements.allEntitiesAsList())); - if (exceptionString.isPresent()) { - throw new InvalidGridException( - duplicateUuidsString(graphicElements.getClass().getSimpleName(), exceptionString)); - } + exceptions.add( + Try.ofVoid( + exceptionString.isPresent(), + () -> + new InvalidGridException( + duplicateUuidsString( + graphicElements.getClass().getSimpleName(), exceptionString)))); graphicElements .getNodeGraphics() .forEach( graphic -> { - GraphicValidationUtils.check(graphic); - if (!nodes.contains(graphic.getNode())) - throw new InvalidEntityException( - "The node graphic with uuid '" - + graphic.getUuid() - + "' refers to node with uuid '" - + graphic.getNode().getUuid() - + "', that is not among the provided ones.", - graphic); + exceptions.addAll(GraphicValidationUtils.check(graphic)); + exceptions.add( + Try.ofVoid( + !nodes.contains(graphic.getNode()), + () -> + buildGraphicExceptionMessage( + graphic, "node", graphic.getNode().getUuid()))); }); graphicElements .getLineGraphics() .forEach( graphic -> { - GraphicValidationUtils.check(graphic); - if (!lines.contains(graphic.getLine())) - throw new InvalidEntityException( - "The line graphic with uuid '" - + graphic.getUuid() - + "' refers to line with uuid '" - + graphic.getLine().getUuid() - + "', that is not among the provided ones.", - graphic); + exceptions.addAll(GraphicValidationUtils.check(graphic)); + exceptions.add( + Try.ofVoid( + !lines.contains(graphic.getLine()), + () -> + buildGraphicExceptionMessage( + graphic, "line", graphic.getLine().getUuid()))); }); - } - - /** - * Checks, if the nodes of the {@link ConnectorInput} are in the collection of provided, already - * determined nodes - * - * @param connector Connector to examine - * @param nodes Permissible, already known nodes - */ - private static void checkNodeAvailability(ConnectorInput connector, Collection nodes) { - if (!nodes.containsAll(Arrays.asList(connector.getNodeA(), connector.getNodeB()))) 
- throw getMissingNodeException(connector); - } - /** - * Checks, if the nodes of the {@link Transformer3WInput} are in the collection of provided, - * already determined nodes - * - * @param transformer Transformer to examine - * @param nodes Permissible, already known nodes - */ - private static void checkNodeAvailability( - Transformer3WInput transformer, Collection nodes) { - if (!nodes.containsAll( - Arrays.asList(transformer.getNodeA(), transformer.getNodeB(), transformer.getNodeC()))) - throw getMissingNodeException(transformer); + return exceptions; } /** - * Checks, if the node of the {@link SystemParticipantInput} are in the collection of provided, - * already determined nodes + * Checks if the node(s) of the given {@link AssetInput} are in the collection of provided already + * determined nodes. * - * @param participant Connector to examine - * @param nodes Permissible, already known nodes + * @param input asset to examine + * @param nodes permissible, already known nodes + * @return either an {@link InvalidGridException} wrapped in a {@link Failure} or an empty {@link + * Success} */ - private static void checkNodeAvailability( - SystemParticipantInput participant, Collection nodes) { - if (!nodes.contains(participant.getNode())) throw getMissingNodeException(participant); - } + private static Try checkNodeAvailability( + AssetInput input, Collection nodes) { + boolean available; + + if (input instanceof Transformer3WInput transformer) { + available = + !nodes.containsAll( + Arrays.asList( + transformer.getNodeA(), transformer.getNodeB(), transformer.getNodeC())); + } else if (input instanceof ConnectorInput connector) { + available = !nodes.containsAll(Arrays.asList(connector.getNodeA(), connector.getNodeB())); + } else if (input instanceof SystemParticipantInput participant) { + available = !nodes.contains(participant.getNode()); + } else if (input instanceof MeasurementUnitInput measurementUnit) { + available = 
!nodes.contains(measurementUnit.getNode()); + } else { + return Failure.ofVoid( + new InvalidGridException( + "Checking the node availability of" + + input.getClass().getSimpleName() + + " is not implemented.")); + } - /** - * Checks, if the node of the {@link MeasurementUnitInput} are in the collection of provided, - * already determined nodes - * - * @param measurementUnit Connector to examine - * @param nodes Permissible, already known nodes - */ - private static void checkNodeAvailability( - MeasurementUnitInput measurementUnit, Collection nodes) { - if (!nodes.contains(measurementUnit.getNode())) throw getMissingNodeException(measurementUnit); + return Try.ofVoid( + available, + () -> + new InvalidGridException( + input.getClass().getSimpleName() + + " " + + input + + " is connected to a node that is not in the set of nodes.")); } /** - * Builds an exception, that announces, that the given input is connected to a node, that is not - * in the set of nodes provided. + * Creates a {@link InvalidEntityException} for graphic inputs. 
* - * @param input Input model - * @return Exception for a missing node + * @param graphic input + * @param type of the graphic + * @param asset uuid of the referred asset + * @return a {@link Failure} */ - private static InvalidGridException getMissingNodeException(AssetInput input) { - return new InvalidGridException( - input.getClass().getSimpleName() - + " " - + input - + " is connected to a node that is not in the set of nodes."); + private static InvalidEntityException buildGraphicExceptionMessage( + GraphicInput graphic, String type, UUID asset) { + return new InvalidEntityException( + "The " + + type + + " graphic with uuid '" + + graphic.getUuid() + + "' refers to " + + type + + " with uuid '" + + asset + + "', that is not a,ong the provided ones.", + graphic); } } diff --git a/src/main/java/edu/ie3/datamodel/utils/validation/MeasurementUnitValidationUtils.java b/src/main/java/edu/ie3/datamodel/utils/validation/MeasurementUnitValidationUtils.java index 6486dd1ad..318cfd13f 100644 --- a/src/main/java/edu/ie3/datamodel/utils/validation/MeasurementUnitValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/validation/MeasurementUnitValidationUtils.java @@ -5,8 +5,11 @@ */ package edu.ie3.datamodel.utils.validation; +import edu.ie3.datamodel.exceptions.InvalidEntityException; import edu.ie3.datamodel.exceptions.UnsafeEntityException; +import edu.ie3.datamodel.exceptions.ValidationException; import edu.ie3.datamodel.models.input.MeasurementUnitInput; +import edu.ie3.datamodel.utils.Try; public class MeasurementUnitValidationUtils extends ValidationUtils { @@ -21,14 +24,23 @@ private MeasurementUnitValidationUtils() { * - any values are measured * * @param measurementUnit Measurement unit to validate + * @return a try object either containing an {@link ValidationException} or an empty Success */ - protected static void check(MeasurementUnitInput measurementUnit) { - checkNonNull(measurementUnit, "a measurement unit"); - if (!measurementUnit.getP() - && 
!measurementUnit.getQ() - && !measurementUnit.getVAng() - && !measurementUnit.getVMag()) - throw new UnsafeEntityException( - "Measurement Unit does not measure any values", measurementUnit); + protected static Try check( + MeasurementUnitInput measurementUnit) { + Try isNull = checkNonNull(measurementUnit, "a measurement unit"); + + if (isNull.isFailure()) { + return isNull; + } + + return Try.ofVoid( + !measurementUnit.getP() + && !measurementUnit.getQ() + && !measurementUnit.getVAng() + && !measurementUnit.getVMag(), + () -> + new UnsafeEntityException( + "Measurement Unit does not measure any values", measurementUnit)); } } diff --git a/src/main/java/edu/ie3/datamodel/utils/validation/NodeValidationUtils.java b/src/main/java/edu/ie3/datamodel/utils/validation/NodeValidationUtils.java index 8aa3c545b..37f948c95 100644 --- a/src/main/java/edu/ie3/datamodel/utils/validation/NodeValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/validation/NodeValidationUtils.java @@ -5,12 +5,14 @@ */ package edu.ie3.datamodel.utils.validation; -import edu.ie3.datamodel.exceptions.InvalidEntityException; -import edu.ie3.datamodel.exceptions.UnsafeEntityException; -import edu.ie3.datamodel.exceptions.VoltageLevelException; +import edu.ie3.datamodel.exceptions.*; import edu.ie3.datamodel.models.StandardUnits; import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.Failure; +import java.util.ArrayList; +import java.util.List; import tech.units.indriya.quantity.Quantities; import tech.units.indriya.unit.Units; @@ -30,34 +32,61 @@ private NodeValidationUtils() { * - geoPosition is not null * * @param node Node to validate + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success */ - protected static void check(NodeInput node) { - checkNonNull(node, "a node"); + protected static List> check(NodeInput 
node) { + Try isNull = checkNonNull(node, "a node"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); + try { checkVoltageLevel(node.getVoltLvl()); } catch (VoltageLevelException e) { - throw new InvalidEntityException("Node has invalid voltage level", node); + exceptions.add( + new Failure<>(new InvalidEntityException("Node has invalid voltage level", node))); + } catch (InvalidEntityException invalidEntityException) { + exceptions.add(new Failure<>(invalidEntityException)); } - if (node.getvTarget() - .isLessThanOrEqualTo(Quantities.getQuantity(0, StandardUnits.TARGET_VOLTAGE_MAGNITUDE))) - throw new InvalidEntityException("Target voltage (p.u.) is not a positive value", node); - else if (node.getvTarget() - .isGreaterThan(Quantities.getQuantity(2, StandardUnits.TARGET_VOLTAGE_MAGNITUDE))) - throw new UnsafeEntityException("Target voltage (p.u.) might be too high", node); - if (node.getSubnet() <= 0) - throw new InvalidEntityException("Subnet can't be zero or negative", node); - if (node.getGeoPosition() == null) - throw new InvalidEntityException("GeoPosition of node is null", node); + + exceptions.add( + Try.ofVoid( + node.getvTarget() + .isLessThanOrEqualTo( + Quantities.getQuantity(0, StandardUnits.TARGET_VOLTAGE_MAGNITUDE)), + () -> + new InvalidEntityException("Target voltage (p.u.) is not a positive value", node))); + exceptions.add( + Try.ofVoid( + node.getvTarget() + .isGreaterThan(Quantities.getQuantity(2, StandardUnits.TARGET_VOLTAGE_MAGNITUDE)), + () -> new UnsafeEntityException("Target voltage (p.u.) 
might be too high", node))); + exceptions.add( + Try.ofVoid( + node.getSubnet() <= 0, + () -> new InvalidEntityException("Subnet can't be zero or negative", node))); + exceptions.add( + Try.ofVoid( + node.getGeoPosition() == null, + () -> new InvalidEntityException("GeoPosition of node is null", node))); + + return exceptions; } /** * Validates a voltage level * * @param voltageLevel Element to validate + * @throws InvalidEntityException If the given voltage level is null * @throws VoltageLevelException If nominal voltage is not apparent or not a positive value */ - private static void checkVoltageLevel(VoltageLevel voltageLevel) throws VoltageLevelException { - checkNonNull(voltageLevel, "a voltage level"); + private static void checkVoltageLevel(VoltageLevel voltageLevel) + throws InvalidEntityException, VoltageLevelException { + checkNonNull(voltageLevel, "a voltage level").getOrThrow(); if (voltageLevel.getNominalVoltage() == null) throw new VoltageLevelException( "The nominal voltage of voltage level " + voltageLevel + " is null"); diff --git a/src/main/java/edu/ie3/datamodel/utils/validation/SystemParticipantValidationUtils.java b/src/main/java/edu/ie3/datamodel/utils/validation/SystemParticipantValidationUtils.java index dfa317819..b2e0bc896 100644 --- a/src/main/java/edu/ie3/datamodel/utils/validation/SystemParticipantValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/validation/SystemParticipantValidationUtils.java @@ -9,9 +9,14 @@ import edu.ie3.datamodel.exceptions.InvalidEntityException; import edu.ie3.datamodel.exceptions.NotImplementedException; +import edu.ie3.datamodel.exceptions.TryException; import edu.ie3.datamodel.models.input.InputEntity; import edu.ie3.datamodel.models.input.system.*; import edu.ie3.datamodel.models.input.system.type.*; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.Failure; +import java.util.ArrayList; +import java.util.List; import javax.measure.Quantity; import 
javax.measure.quantity.Dimensionless; import tech.units.indriya.ComparableQuantity; @@ -34,35 +39,59 @@ private SystemParticipantValidationUtils() { * fulfill the checking task, based on the class of the given object. * * @param systemParticipant systemParticipant to validate - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - protected static void check(SystemParticipantInput systemParticipant) { - checkNonNull(systemParticipant, "a system participant"); - if (systemParticipant.getqCharacteristics() == null) - throw new InvalidEntityException( - "Reactive power characteristics of system participant is not defined", systemParticipant); + protected static List> check( + SystemParticipantInput systemParticipant) { + Try isNull = + checkNonNull(systemParticipant, "a system participant"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); + + exceptions.add( + Try.ofVoid( + systemParticipant.getqCharacteristics() == null, + () -> + new InvalidEntityException( + "Reactive power characteristics of system participant is not defined", + systemParticipant))); // Further checks for subclasses - if (BmInput.class.isAssignableFrom(systemParticipant.getClass())) - checkBm((BmInput) systemParticipant); - else if (ChpInput.class.isAssignableFrom(systemParticipant.getClass())) - checkChp((ChpInput) systemParticipant); - else if (EvInput.class.isAssignableFrom(systemParticipant.getClass())) - checkEv((EvInput) systemParticipant); - else if (FixedFeedInInput.class.isAssignableFrom(systemParticipant.getClass())) - checkFixedFeedIn((FixedFeedInInput) systemParticipant); - else if (HpInput.class.isAssignableFrom(systemParticipant.getClass())) - checkHp((HpInput) systemParticipant); - else if (LoadInput.class.isAssignableFrom(systemParticipant.getClass())) - 
checkLoad((LoadInput) systemParticipant); - else if (PvInput.class.isAssignableFrom(systemParticipant.getClass())) - checkPv((PvInput) systemParticipant); - else if (StorageInput.class.isAssignableFrom(systemParticipant.getClass())) - checkStorage((StorageInput) systemParticipant); - else if (WecInput.class.isAssignableFrom(systemParticipant.getClass())) - checkWec((WecInput) systemParticipant); - else if (EvcsInput.class.isAssignableFrom(systemParticipant.getClass())) checkEvcs(); - else throw checkNotImplementedException(systemParticipant); + if (BmInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkBm((BmInput) systemParticipant)); + } else if (ChpInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkChp((ChpInput) systemParticipant)); + } else if (EvInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkEv((EvInput) systemParticipant)); + } else if (FixedFeedInInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkFixedFeedIn((FixedFeedInInput) systemParticipant)); + } else if (HpInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkHp((HpInput) systemParticipant)); + } else if (LoadInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkLoad((LoadInput) systemParticipant)); + } else if (PvInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkPv((PvInput) systemParticipant)); + } else if (StorageInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkStorage((StorageInput) systemParticipant)); + } else if (WecInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.addAll(checkWec((WecInput) systemParticipant)); + } else if (EvcsInput.class.isAssignableFrom(systemParticipant.getClass())) { + exceptions.add( + Try.ofVoid(SystemParticipantValidationUtils::checkEvcs, 
NotImplementedException.class) + .transformF(e -> new InvalidEntityException(e.getMessage(), e.getCause()))); + } else { + exceptions.add( + new Failure<>( + new InvalidEntityException( + "Validation failed due to: ", buildNotImplementedException(systemParticipant)))); + } + + return exceptions; } /** @@ -77,37 +106,75 @@ else if (WecInput.class.isAssignableFrom(systemParticipant.getClass())) * fulfill the checking task, based on the class of the given object. * * @param systemParticipantTypeInput systemParticipant Type to validate - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - protected static void checkType(SystemParticipantTypeInput systemParticipantTypeInput) { - checkNonNull(systemParticipantTypeInput, "a system participant type"); - if ((systemParticipantTypeInput.getCapex() == null) - || (systemParticipantTypeInput.getOpex() == null) - || (systemParticipantTypeInput.getsRated() == null)) - throw new InvalidEntityException( - "At least one of capex, opex, or sRated is null", systemParticipantTypeInput); - detectNegativeQuantities( - new Quantity[] { - systemParticipantTypeInput.getCapex(), - systemParticipantTypeInput.getOpex(), - systemParticipantTypeInput.getsRated() - }, - systemParticipantTypeInput); - checkRatedPowerFactor(systemParticipantTypeInput, systemParticipantTypeInput.getCosPhiRated()); - - if (BmTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) - checkBmType((BmTypeInput) systemParticipantTypeInput); - else if (ChpTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) - checkChpType((ChpTypeInput) systemParticipantTypeInput); - else if (EvTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) - checkEvType((EvTypeInput) systemParticipantTypeInput); - else if 
(HpTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) - checkHpType((HpTypeInput) systemParticipantTypeInput); - else if (StorageTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) - checkStorageType((StorageTypeInput) systemParticipantTypeInput); - else if (WecTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) - checkWecType((WecTypeInput) systemParticipantTypeInput); - else throw checkNotImplementedException(systemParticipantTypeInput); + protected static List> checkType( + SystemParticipantTypeInput systemParticipantTypeInput) { + Try isNull = + checkNonNull(systemParticipantTypeInput, "a system participant type"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); + + exceptions.add( + Try.ofVoid( + (systemParticipantTypeInput.getCapex() == null) + || (systemParticipantTypeInput.getOpex() == null) + || (systemParticipantTypeInput.getsRated() == null), + () -> + new InvalidEntityException( + "At least one of capex, opex, or sRated is null", systemParticipantTypeInput))); + + try { + exceptions.add( + Try.ofVoid( + () -> + detectNegativeQuantities( + new Quantity[] { + systemParticipantTypeInput.getCapex(), + systemParticipantTypeInput.getOpex(), + systemParticipantTypeInput.getsRated() + }, + systemParticipantTypeInput), + InvalidEntityException.class)); + } catch (TryException e) { + Throwable wronglyCaught = e.getCause(); + exceptions.add( + Failure.ofVoid(new InvalidEntityException(wronglyCaught.getMessage(), wronglyCaught))); + } + + exceptions.add( + Try.ofVoid( + () -> + checkRatedPowerFactor( + systemParticipantTypeInput, systemParticipantTypeInput.getCosPhiRated()), + InvalidEntityException.class)); + + if (BmTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) { + exceptions.addAll(checkBmType((BmTypeInput) systemParticipantTypeInput)); + } else if 
(ChpTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) { + exceptions.addAll(checkChpType((ChpTypeInput) systemParticipantTypeInput)); + } else if (EvTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) { + exceptions.add(checkEvType((EvTypeInput) systemParticipantTypeInput)); + } else if (HpTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) { + exceptions.add(checkHpType((HpTypeInput) systemParticipantTypeInput)); + } else if (StorageTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) { + exceptions.addAll(checkStorageType((StorageTypeInput) systemParticipantTypeInput)); + } else if (WecTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass())) { + exceptions.addAll(checkWecType((WecTypeInput) systemParticipantTypeInput)); + } else { + exceptions.add( + new Failure<>( + new InvalidEntityException( + buildNotImplementedException(systemParticipantTypeInput).getMessage(), + systemParticipantTypeInput))); + } + + return exceptions; } /** @@ -116,9 +183,11 @@ else if (WecTypeInput.class.isAssignableFrom(systemParticipantTypeInput.getClass * properties
* * @param bmInput BmInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkBm(BmInput bmInput) { - checkType(bmInput.getType()); + private static List> checkBm(BmInput bmInput) { + return checkType(bmInput.getType()); } /** @@ -127,10 +196,18 @@ private static void checkBm(BmInput bmInput) { * - its efficiency of assets inverter is between 0% and 100% * * @param bmTypeInput BmTypeInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkBmType(BmTypeInput bmTypeInput) { - detectNegativeQuantities(new Quantity[] {bmTypeInput.getActivePowerGradient()}, bmTypeInput); - isBetweenZeroAndHundredPercent(bmTypeInput, bmTypeInput.getEtaConv(), "Efficiency of inverter"); + private static List> checkBmType(BmTypeInput bmTypeInput) { + return Try.ofVoid( + InvalidEntityException.class, + () -> + detectNegativeQuantities( + new Quantity[] {bmTypeInput.getActivePowerGradient()}, bmTypeInput), + () -> + isBetweenZeroAndHundredPercent( + bmTypeInput, bmTypeInput.getEtaConv(), "Efficiency of inverter")); } /** @@ -139,9 +216,11 @@ private static void checkBmType(BmTypeInput bmTypeInput) { * properties * * @param chpInput ChpInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkChp(ChpInput chpInput) { - checkType(chpInput.getType()); + private static List> checkChp(ChpInput chpInput) { + return checkType(chpInput.getType()); } /** @@ -152,13 +231,22 @@ private static void checkChp(ChpInput chpInput) { * - its needed self-consumption is not negative * * @param chpTypeInput ChpTypeInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkChpType(ChpTypeInput chpTypeInput) { - 
detectNegativeQuantities(new Quantity[] {chpTypeInput.getpOwn()}, chpTypeInput); - detectZeroOrNegativeQuantities(new Quantity[] {chpTypeInput.getpThermal()}, chpTypeInput); - isBetweenZeroAndHundredPercent(chpTypeInput, chpTypeInput.getEtaEl(), "Electrical efficiency"); - isBetweenZeroAndHundredPercent( - chpTypeInput, chpTypeInput.getEtaThermal(), "Thermal efficiency"); + private static List> checkChpType(ChpTypeInput chpTypeInput) { + return Try.ofVoid( + InvalidEntityException.class, + () -> detectNegativeQuantities(new Quantity[] {chpTypeInput.getpOwn()}, chpTypeInput), + () -> + detectZeroOrNegativeQuantities( + new Quantity[] {chpTypeInput.getpThermal()}, chpTypeInput), + () -> + isBetweenZeroAndHundredPercent( + chpTypeInput, chpTypeInput.getEtaEl(), "Electrical efficiency"), + () -> + isBetweenZeroAndHundredPercent( + chpTypeInput, chpTypeInput.getEtaThermal(), "Thermal efficiency")); } /** @@ -167,9 +255,11 @@ private static void checkChpType(ChpTypeInput chpTypeInput) { * properties * * @param evInput EvInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkEv(EvInput evInput) { - checkType(evInput.getType()); + private static List> checkEv(EvInput evInput) { + return checkType(evInput.getType()); } /** @@ -178,10 +268,14 @@ private static void checkEv(EvInput evInput) { * - its energy consumption per driven kilometre is positive * * @param evTypeInput EvTypeInput to validate + * @return a try object either containing an {@link InvalidEntityException} or an empty Success */ - private static void checkEvType(EvTypeInput evTypeInput) { - detectZeroOrNegativeQuantities( - new Quantity[] {evTypeInput.geteStorage(), evTypeInput.geteCons()}, evTypeInput); + private static Try checkEvType(EvTypeInput evTypeInput) { + return Try.ofVoid( + () -> + detectZeroOrNegativeQuantities( + new Quantity[] {evTypeInput.geteStorage(), evTypeInput.geteCons()}, evTypeInput), 
+ InvalidEntityException.class); } /** @@ -190,10 +284,17 @@ private static void checkEvType(EvTypeInput evTypeInput) { * - its rated power factor is between 0 and 1 * * @param fixedFeedInInput FixedFeedInInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkFixedFeedIn(FixedFeedInInput fixedFeedInInput) { - detectNegativeQuantities(new Quantity[] {fixedFeedInInput.getsRated()}, fixedFeedInInput); - checkRatedPowerFactor(fixedFeedInInput, fixedFeedInInput.getCosPhiRated()); + private static List> checkFixedFeedIn( + FixedFeedInInput fixedFeedInInput) { + return Try.ofVoid( + InvalidEntityException.class, + () -> + detectNegativeQuantities( + new Quantity[] {fixedFeedInInput.getsRated()}, fixedFeedInInput), + () -> checkRatedPowerFactor(fixedFeedInInput, fixedFeedInInput.getCosPhiRated())); } /** @@ -202,9 +303,11 @@ private static void checkFixedFeedIn(FixedFeedInInput fixedFeedInInput) { * properties * * @param hpInput HpInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkHp(HpInput hpInput) { - checkType(hpInput.getType()); + private static List> checkHp(HpInput hpInput) { + return checkType(hpInput.getType()); } /** @@ -212,9 +315,14 @@ private static void checkHp(HpInput hpInput) { * - its rated thermal power is positive * * @param hpTypeInput HpTypeInput to validate + * @return a try object either containing an {@link InvalidEntityException} or an empty Success */ - private static void checkHpType(HpTypeInput hpTypeInput) { - detectZeroOrNegativeQuantities(new Quantity[] {hpTypeInput.getpThermal()}, hpTypeInput); + private static Try checkHpType(HpTypeInput hpTypeInput) { + return Try.ofVoid( + () -> + detectZeroOrNegativeQuantities( + new Quantity[] {hpTypeInput.getpThermal()}, hpTypeInput), + InvalidEntityException.class); } /** @@ -225,13 +333,29 @@ 
private static void checkHpType(HpTypeInput hpTypeInput) { * - its rated power factor is between 0 and 1 * * @param loadInput LoadInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkLoad(LoadInput loadInput) { - if (loadInput.getLoadProfile() == null) - throw new InvalidEntityException("No standard load profile defined for load", loadInput); - detectNegativeQuantities( - new Quantity[] {loadInput.getsRated(), loadInput.geteConsAnnual()}, loadInput); - checkRatedPowerFactor(loadInput, loadInput.getCosPhiRated()); + private static List> checkLoad(LoadInput loadInput) { + List> exceptions = new ArrayList<>(); + + exceptions.add( + Try.ofVoid( + loadInput.getLoadProfile() == null, + () -> + new InvalidEntityException( + "No standard load profile defined for load", loadInput))); + + exceptions.addAll( + Try.ofVoid( + InvalidEntityException.class, + () -> + detectNegativeQuantities( + new Quantity[] {loadInput.getsRated(), loadInput.geteConsAnnual()}, + loadInput), + () -> checkRatedPowerFactor(loadInput, loadInput.getCosPhiRated()))); + + return exceptions; } /** @@ -244,14 +368,20 @@ private static void checkLoad(LoadInput loadInput) { * - its rated power factor is between 0 and 1 * * @param pvInput PvInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkPv(PvInput pvInput) { - detectNegativeQuantities(new Quantity[] {pvInput.getsRated()}, pvInput); - checkAlbedo(pvInput); - checkAzimuth(pvInput); - isBetweenZeroAndHundredPercent(pvInput, pvInput.getEtaConv(), "Efficiency of the converter"); - checkElevationAngle(pvInput); - checkRatedPowerFactor(pvInput, pvInput.getCosPhiRated()); + private static List> checkPv(PvInput pvInput) { + return Try.ofVoid( + InvalidEntityException.class, + () -> detectNegativeQuantities(new Quantity[] {pvInput.getsRated()}, pvInput), 
+ () -> checkAlbedo(pvInput), + () -> checkAzimuth(pvInput), + () -> + isBetweenZeroAndHundredPercent( + pvInput, pvInput.getEtaConv(), "Efficiency of the converter"), + () -> checkElevationAngle(pvInput), + () -> checkRatedPowerFactor(pvInput, pvInput.getCosPhiRated())); } /** @@ -259,7 +389,7 @@ private static void checkPv(PvInput pvInput) { * * @param pvInput PvInput to validate */ - private static void checkAlbedo(PvInput pvInput) { + private static void checkAlbedo(PvInput pvInput) throws InvalidEntityException { if (pvInput.getAlbedo() < 0d || pvInput.getAlbedo() > 1d) throw new InvalidEntityException( "Albedo of the plant's surrounding of " @@ -273,7 +403,7 @@ private static void checkAlbedo(PvInput pvInput) { * * @param pvInput PvInput to validate */ - private static void checkAzimuth(PvInput pvInput) { + private static void checkAzimuth(PvInput pvInput) throws InvalidEntityException { if (pvInput.getAzimuth().isLessThan(Quantities.getQuantity(-90d, AZIMUTH)) || pvInput.getAzimuth().isGreaterThan(Quantities.getQuantity(90d, AZIMUTH))) throw new InvalidEntityException( @@ -288,7 +418,7 @@ private static void checkAzimuth(PvInput pvInput) { * * @param pvInput PvInput to validate */ - private static void checkElevationAngle(PvInput pvInput) { + private static void checkElevationAngle(PvInput pvInput) throws InvalidEntityException { if (pvInput.getElevationAngle().isLessThan(Quantities.getQuantity(0d, SOLAR_ELEVATION_ANGLE)) || pvInput .getElevationAngle() @@ -306,9 +436,11 @@ private static void checkElevationAngle(PvInput pvInput) { * type properties * * @param storageInput StorageInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkStorage(StorageInput storageInput) { - checkType(storageInput.getType()); + private static List> checkStorage(StorageInput storageInput) { + return checkType(storageInput.getType()); } /** @@ -322,25 +454,47 @@ private static 
void checkStorage(StorageInput storageInput) { * - its permissible hours of full use is not negative * * @param storageTypeInput StorageTypeInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkStorageType(StorageTypeInput storageTypeInput) { - if (storageTypeInput.getLifeCycle() < 0) - throw new InvalidEntityException( - "Permissible amount of life cycles of the storage type must be zero or positive", - storageTypeInput); - isBetweenZeroAndHundredPercent( - storageTypeInput, storageTypeInput.getEta(), "Efficiency of the electrical converter"); - isBetweenZeroAndHundredPercent( - storageTypeInput, storageTypeInput.getDod(), "Maximum permissible depth of discharge"); - detectNegativeQuantities( - new Quantity[] { - storageTypeInput.getpMax(), - storageTypeInput.getActivePowerGradient(), - storageTypeInput.getLifeTime() - }, - storageTypeInput); - detectZeroOrNegativeQuantities( - new Quantity[] {storageTypeInput.geteStorage()}, storageTypeInput); + private static List> checkStorageType( + StorageTypeInput storageTypeInput) { + List> exceptions = new ArrayList<>(); + + exceptions.add( + Try.ofVoid( + storageTypeInput.getLifeCycle() < 0, + () -> + new InvalidEntityException( + "Permissible amount of life cycles of the storage type must be zero or positive", + storageTypeInput))); + + exceptions.addAll( + Try.ofVoid( + InvalidEntityException.class, + () -> + isBetweenZeroAndHundredPercent( + storageTypeInput, + storageTypeInput.getEta(), + "Efficiency of the electrical converter"), + () -> + isBetweenZeroAndHundredPercent( + storageTypeInput, + storageTypeInput.getDod(), + "Maximum permissible depth of discharge"), + () -> + detectNegativeQuantities( + new Quantity[] { + storageTypeInput.getpMax(), + storageTypeInput.getActivePowerGradient(), + storageTypeInput.getLifeTime() + }, + storageTypeInput), + () -> + detectZeroOrNegativeQuantities( + new Quantity[] 
{storageTypeInput.geteStorage()}, storageTypeInput))); + + return exceptions; } /** @@ -349,9 +503,11 @@ private static void checkStorageType(StorageTypeInput storageTypeInput) { * properties * * @param wecInput WecInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkWec(WecInput wecInput) { - checkType(wecInput.getType()); + private static List> checkWec(WecInput wecInput) { + return checkType(wecInput.getType()); } /** @@ -361,12 +517,19 @@ private static void checkWec(WecInput wecInput) { * - its height of the rotor hub is not negative * * @param wecTypeInput WecTypeInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkWecType(WecTypeInput wecTypeInput) { - isBetweenZeroAndHundredPercent( - wecTypeInput, wecTypeInput.getEtaConv(), "Efficiency of the converter"); - detectNegativeQuantities( - new Quantity[] {wecTypeInput.getRotorArea(), wecTypeInput.getHubHeight()}, wecTypeInput); + private static List> checkWecType(WecTypeInput wecTypeInput) { + return Try.ofVoid( + InvalidEntityException.class, + () -> + isBetweenZeroAndHundredPercent( + wecTypeInput, wecTypeInput.getEtaConv(), "Efficiency of the converter"), + () -> + detectNegativeQuantities( + new Quantity[] {wecTypeInput.getRotorArea(), wecTypeInput.getHubHeight()}, + wecTypeInput)); } /** Validates a EvcsInput */ @@ -383,7 +546,8 @@ private static void checkEvcs() { * @param input entity to validate * @param cosPhiRated rated power factor to check */ - private static void checkRatedPowerFactor(InputEntity input, double cosPhiRated) { + private static void checkRatedPowerFactor(InputEntity input, double cosPhiRated) + throws InvalidEntityException { if (cosPhiRated < 0d || cosPhiRated > 1d) throw new InvalidEntityException( "Rated power factor of " + input.getClass().getSimpleName() + " must be between 
0 and 1", @@ -398,7 +562,8 @@ private static void checkRatedPowerFactor(InputEntity input, double cosPhiRated) * @param value value of entity to check */ private static void isBetweenZeroAndHundredPercent( - InputEntity input, ComparableQuantity value, String string) { + InputEntity input, ComparableQuantity value, String string) + throws InvalidEntityException { if (value.isLessThan(Quantities.getQuantity(0d, Units.PERCENT)) || value.isGreaterThan(Quantities.getQuantity(100d, Units.PERCENT))) throw new InvalidEntityException( diff --git a/src/main/java/edu/ie3/datamodel/utils/validation/ThermalUnitValidationUtils.java b/src/main/java/edu/ie3/datamodel/utils/validation/ThermalUnitValidationUtils.java index e968d210e..8dd2b39f0 100644 --- a/src/main/java/edu/ie3/datamodel/utils/validation/ThermalUnitValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/validation/ThermalUnitValidationUtils.java @@ -5,8 +5,14 @@ */ package edu.ie3.datamodel.utils.validation; +import edu.ie3.datamodel.exceptions.FailedValidationException; import edu.ie3.datamodel.exceptions.InvalidEntityException; +import edu.ie3.datamodel.exceptions.ValidationException; import edu.ie3.datamodel.models.input.thermal.*; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.Failure; +import java.util.ArrayList; +import java.util.List; import javax.measure.Quantity; public class ThermalUnitValidationUtils extends ValidationUtils { @@ -23,17 +29,32 @@ private ThermalUnitValidationUtils() { * the checking task, based on the class of the given object. 
* * @param thermalUnitInput ThermalUnitInput to validate - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success */ - protected static void check(ThermalUnitInput thermalUnitInput) { - checkNonNull(thermalUnitInput, "a thermal unit"); + protected static List> check( + ThermalUnitInput thermalUnitInput) { + Try isNull = checkNonNull(thermalUnitInput, "a thermal unit"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); // Further checks for subclasses - if (ThermalSinkInput.class.isAssignableFrom(thermalUnitInput.getClass())) - checkThermalSink((ThermalSinkInput) thermalUnitInput); - else if (ThermalStorageInput.class.isAssignableFrom(thermalUnitInput.getClass())) - checkThermalStorage((ThermalStorageInput) thermalUnitInput); - else throw checkNotImplementedException(thermalUnitInput); + if (ThermalSinkInput.class.isAssignableFrom(thermalUnitInput.getClass())) { + exceptions.addAll(checkThermalSink((ThermalSinkInput) thermalUnitInput)); + } else if (ThermalStorageInput.class.isAssignableFrom(thermalUnitInput.getClass())) { + exceptions.addAll(checkThermalStorage((ThermalStorageInput) thermalUnitInput)); + } else { + exceptions.add( + new Failure<>( + new FailedValidationException( + buildNotImplementedException(thermalUnitInput).getMessage()))); + } + + return exceptions; } /** @@ -43,15 +64,30 @@ else if (ThermalStorageInput.class.isAssignableFrom(thermalUnitInput.getClass()) * the checking task, based on the class of the given object. 
* * @param thermalSinkInput ThermalSinkInput to validate - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success */ - private static void checkThermalSink(ThermalSinkInput thermalSinkInput) { - checkNonNull(thermalSinkInput, "a thermal sink"); + private static List> checkThermalSink( + ThermalSinkInput thermalSinkInput) { + Try isNull = checkNonNull(thermalSinkInput, "a thermal sink"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); // Further checks for subclasses - if (ThermalHouseInput.class.isAssignableFrom(thermalSinkInput.getClass())) - checkThermalHouse((ThermalHouseInput) thermalSinkInput); - else throw checkNotImplementedException(thermalSinkInput); + if (ThermalHouseInput.class.isAssignableFrom(thermalSinkInput.getClass())) { + exceptions.addAll(checkThermalHouse((ThermalHouseInput) thermalSinkInput)); + } else { + exceptions.add( + new Failure<>( + new FailedValidationException( + buildNotImplementedException(thermalSinkInput).getMessage()))); + } + + return exceptions; } /** @@ -61,15 +97,31 @@ private static void checkThermalSink(ThermalSinkInput thermalSinkInput) { * the checking task, based on the class of the given object. 
* * @param thermalStorageInput ThermalStorageInput to validate - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing an {@link ValidationException} or an empty + * Success */ - private static void checkThermalStorage(ThermalStorageInput thermalStorageInput) { - checkNonNull(thermalStorageInput, "a thermal storage"); + private static List> checkThermalStorage( + ThermalStorageInput thermalStorageInput) { + Try isNull = + checkNonNull(thermalStorageInput, "a thermal storage"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); // Further checks for subclasses - if (CylindricalStorageInput.class.isAssignableFrom(thermalStorageInput.getClass())) - checkCylindricalStorage((CylindricalStorageInput) thermalStorageInput); - else throw checkNotImplementedException(thermalStorageInput); + if (CylindricalStorageInput.class.isAssignableFrom(thermalStorageInput.getClass())) { + exceptions.addAll(checkCylindricalStorage((CylindricalStorageInput) thermalStorageInput)); + } else { + exceptions.add( + new Failure<>( + new FailedValidationException( + buildNotImplementedException(thermalStorageInput).getMessage()))); + } + + return exceptions; } /** @@ -81,22 +133,42 @@ private static void checkThermalStorage(ThermalStorageInput thermalStorageInput) * - its target temperature lies between the upper und lower limit temperatures * * @param thermalHouseInput ThermalHouseInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkThermalHouse(ThermalHouseInput thermalHouseInput) { - checkNonNull(thermalHouseInput, "a thermal house"); - detectNegativeQuantities( - new Quantity[] {thermalHouseInput.getEthLosses()}, thermalHouseInput); - detectZeroOrNegativeQuantities( - new Quantity[] {thermalHouseInput.getEthCapa()}, thermalHouseInput); + private 
static List> checkThermalHouse( + ThermalHouseInput thermalHouseInput) { + Try isNull = checkNonNull(thermalHouseInput, "a thermal house"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = + new ArrayList<>( + Try.ofVoid( + InvalidEntityException.class, + () -> + detectNegativeQuantities( + new Quantity[] {thermalHouseInput.getEthLosses()}, thermalHouseInput), + () -> + detectZeroOrNegativeQuantities( + new Quantity[] {thermalHouseInput.getEthCapa()}, thermalHouseInput))); + if (thermalHouseInput .getLowerTemperatureLimit() .isGreaterThan(thermalHouseInput.getTargetTemperature()) || thermalHouseInput .getUpperTemperatureLimit() - .isLessThan(thermalHouseInput.getTargetTemperature())) - throw new InvalidEntityException( - "Target temperature must be higher than lower temperature limit and lower than upper temperature limit", - thermalHouseInput); + .isLessThan(thermalHouseInput.getTargetTemperature())) { + exceptions.add( + new Failure<>( + new InvalidEntityException( + "Target temperature must be higher than lower temperature limit and lower than upper temperature limit", + thermalHouseInput))); + } + + return exceptions; } /** @@ -109,27 +181,53 @@ private static void checkThermalHouse(ThermalHouseInput thermalHouseInput) { * - its specific heat capacity is positive * * @param cylindricalStorageInput CylindricalStorageInput to validate + * @return a list of try objects either containing an {@link InvalidEntityException} or an empty + * Success */ - private static void checkCylindricalStorage(CylindricalStorageInput cylindricalStorageInput) { - checkNonNull(cylindricalStorageInput, "a cylindrical storage"); + private static List> checkCylindricalStorage( + CylindricalStorageInput cylindricalStorageInput) { + Try isNull = + checkNonNull(cylindricalStorageInput, "a cylindrical storage"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); + // Check if inlet temperature is 
higher/equal to outlet temperature - if (cylindricalStorageInput.getInletTemp().isLessThan(cylindricalStorageInput.getReturnTemp())) - throw new InvalidEntityException( - "Inlet temperature of the cylindrical storage cannot be lower than outlet temperature", - cylindricalStorageInput); + exceptions.add( + Try.ofVoid( + cylindricalStorageInput + .getInletTemp() + .isLessThan(cylindricalStorageInput.getReturnTemp()), + () -> + new InvalidEntityException( + "Inlet temperature of the cylindrical storage cannot be lower than outlet temperature", + cylindricalStorageInput))); // Check if minimum permissible storage volume is lower than overall available storage volume - if (cylindricalStorageInput - .getStorageVolumeLvlMin() - .isGreaterThan(cylindricalStorageInput.getStorageVolumeLvl())) - throw new InvalidEntityException( - "Minimum permissible storage volume of the cylindrical storage cannot be higher than overall available storage volume", - cylindricalStorageInput); - detectZeroOrNegativeQuantities( - new Quantity[] { - cylindricalStorageInput.getStorageVolumeLvl(), - cylindricalStorageInput.getStorageVolumeLvlMin(), - cylindricalStorageInput.getC() - }, - cylindricalStorageInput); + exceptions.add( + Try.ofVoid( + cylindricalStorageInput + .getStorageVolumeLvlMin() + .isGreaterThan(cylindricalStorageInput.getStorageVolumeLvl()), + () -> + new InvalidEntityException( + "Minimum permissible storage volume of the cylindrical storage cannot be higher than overall available storage volume", + cylindricalStorageInput))); + + exceptions.add( + Try.ofVoid( + () -> + detectZeroOrNegativeQuantities( + new Quantity[] { + cylindricalStorageInput.getStorageVolumeLvl(), + cylindricalStorageInput.getStorageVolumeLvlMin(), + cylindricalStorageInput.getC() + }, + cylindricalStorageInput), + InvalidEntityException.class)); + + return exceptions; } } diff --git a/src/main/java/edu/ie3/datamodel/utils/validation/ValidationUtils.java 
b/src/main/java/edu/ie3/datamodel/utils/validation/ValidationUtils.java index a0fbe3848..c09607a0d 100644 --- a/src/main/java/edu/ie3/datamodel/utils/validation/ValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/validation/ValidationUtils.java @@ -20,15 +20,20 @@ import edu.ie3.datamodel.models.input.system.SystemParticipantInput; import edu.ie3.datamodel.models.input.system.type.*; import edu.ie3.datamodel.models.input.thermal.ThermalUnitInput; +import edu.ie3.datamodel.utils.Try; +import edu.ie3.datamodel.utils.Try.*; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import javax.measure.Quantity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Basic Sanity validation tools for entities */ public class ValidationUtils { + protected static final Logger logger = LoggerFactory.getLogger(ValidationUtils.class); /** Private Constructor as this class is not meant to be instantiated */ protected ValidationUtils() { @@ -42,7 +47,7 @@ protected ValidationUtils() { * @param obj Object, that cannot be checked * @return Exception with predefined error string */ - protected static NotImplementedException checkNotImplementedException(Object obj) { + protected static NotImplementedException buildNotImplementedException(Object obj) { return new NotImplementedException( String.format( "Cannot validate object of class '%s', as no routine is implemented.", @@ -54,18 +59,33 @@ protected static NotImplementedException checkNotImplementedException(Object obj * fulfill the checking task, based on the class of the given object. 
* * @param obj Object to check - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in */ - public static void check(Object obj) { - checkNonNull(obj, "an object"); - if (AssetInput.class.isAssignableFrom(obj.getClass())) checkAsset((AssetInput) obj); - else if (GridContainer.class.isAssignableFrom(obj.getClass())) - GridContainerValidationUtils.check((GridContainer) obj); - else if (GraphicInput.class.isAssignableFrom(obj.getClass())) - GraphicValidationUtils.check((GraphicInput) obj); - else if (AssetTypeInput.class.isAssignableFrom(obj.getClass())) - checkAssetType((AssetTypeInput) obj); - else throw checkNotImplementedException(obj); + public static void check(Object obj) throws ValidationException { + checkNonNull(obj, "an object").getOrThrow(); + + List> exceptions = new ArrayList<>(); + + if (AssetInput.class.isAssignableFrom(obj.getClass())) { + exceptions.addAll(checkAsset((AssetInput) obj)); + } else if (GridContainer.class.isAssignableFrom(obj.getClass())) { + exceptions.addAll(GridContainerValidationUtils.check((GridContainer) obj)); + } else if (GraphicInput.class.isAssignableFrom(obj.getClass())) { + exceptions.addAll(GraphicValidationUtils.check((GraphicInput) obj)); + } else if (AssetTypeInput.class.isAssignableFrom(obj.getClass())) { + exceptions.addAll(checkAssetType((AssetTypeInput) obj)); + } else { + exceptions.add( + new Failure<>( + new FailedValidationException(buildNotImplementedException(obj).getMessage()))); + } + + List list = + exceptions.stream() + .filter(Try::isFailure) + .map(t -> ((Failure) t).get()) + .toList(); + + Try.ofVoid(!list.isEmpty(), () -> new FailedValidationException(list)).getOrThrow(); } /** @@ -78,44 +98,71 @@ else if (AssetTypeInput.class.isAssignableFrom(obj.getClass())) * the checking task, based on the class of the given object. 
* * @param assetInput AssetInput to check - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing a {@link ValidationException} or an empty + * Success */ - private static void checkAsset(AssetInput assetInput) { - checkNonNull(assetInput, "an asset"); - if (assetInput.getId() == null) throw new InvalidEntityException("No ID assigned", assetInput); - if (assetInput.getOperationTime() == null) - throw new InvalidEntityException("Operation time of the asset is not defined", assetInput); - // Check if start time and end time are not null and start time is before end time - if (assetInput.getOperationTime().isLimited()) { - assetInput - .getOperationTime() - .getEndDate() - .ifPresent( - endDate -> - assetInput - .getOperationTime() - .getStartDate() - .ifPresent( - startDate -> { - if (endDate.isBefore(startDate)) - throw new InvalidEntityException( - "Operation start time of the asset has to be before end time", - assetInput); - })); + private static List> checkAsset(AssetInput assetInput) { + Try isNull = checkNonNull(assetInput, "an asset"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); + + exceptions.add( + Try.ofVoid( + assetInput.getId() == null, + () -> new InvalidEntityException("No ID assigned", assetInput))); + + if (assetInput.getOperationTime() == null) { + exceptions.add( + Failure.ofVoid( + new InvalidEntityException( + "Operation time of the asset is not defined", assetInput))); + } else { + // Check if start time and end time are not null and start time is before end time + if (assetInput.getOperationTime().isLimited()) { + assetInput + .getOperationTime() + .getEndDate() + .ifPresent( + endDate -> + assetInput + .getOperationTime() + .getStartDate() + .ifPresent( + startDate -> { + if (endDate.isBefore(startDate)) + exceptions.add( + new Failure<>( + new InvalidEntityException( + "Operation start time of 
the asset has to be before end time", + assetInput))); + })); + } } // Further checks for subclasses if (NodeInput.class.isAssignableFrom(assetInput.getClass())) - NodeValidationUtils.check((NodeInput) assetInput); + exceptions.addAll(NodeValidationUtils.check((NodeInput) assetInput)); else if (ConnectorInput.class.isAssignableFrom(assetInput.getClass())) - ConnectorValidationUtils.check((ConnectorInput) assetInput); + exceptions.addAll(ConnectorValidationUtils.check((ConnectorInput) assetInput)); else if (MeasurementUnitInput.class.isAssignableFrom(assetInput.getClass())) - MeasurementUnitValidationUtils.check((MeasurementUnitInput) assetInput); + exceptions.add(MeasurementUnitValidationUtils.check((MeasurementUnitInput) assetInput)); else if (SystemParticipantInput.class.isAssignableFrom(assetInput.getClass())) - SystemParticipantValidationUtils.check((SystemParticipantInput) assetInput); + exceptions.addAll( + SystemParticipantValidationUtils.check((SystemParticipantInput) assetInput)); else if (ThermalUnitInput.class.isAssignableFrom(assetInput.getClass())) - ThermalUnitValidationUtils.check((ThermalUnitInput) assetInput); - else throw checkNotImplementedException(assetInput); + exceptions.addAll(ThermalUnitValidationUtils.check((ThermalUnitInput) assetInput)); + else { + exceptions.add( + new Failure<>( + new FailedValidationException( + buildNotImplementedException(assetInput).getMessage()))); + } + + return exceptions; } /** @@ -125,39 +172,97 @@ else if (ThermalUnitInput.class.isAssignableFrom(assetInput.getClass())) * the checking task, based on the class of the given object. 
* * @param assetTypeInput AssetTypeInput to check - * @throws edu.ie3.datamodel.exceptions.NotImplementedException if an unknown class is handed in + * @return a list of try objects either containing a {@link ValidationException} or an empty + * Success */ - private static void checkAssetType(AssetTypeInput assetTypeInput) { - checkNonNull(assetTypeInput, "an asset type"); - if (assetTypeInput.getUuid() == null) - throw new InvalidEntityException("No UUID assigned", assetTypeInput); - if (assetTypeInput.getId() == null) - throw new InvalidEntityException("No ID assigned", assetTypeInput); + private static List> checkAssetType( + AssetTypeInput assetTypeInput) { + Try isNull = checkNonNull(assetTypeInput, "an asset type"); + + if (isNull.isFailure()) { + return List.of(isNull); + } + + List> exceptions = new ArrayList<>(); + + exceptions.add( + Try.ofVoid( + assetTypeInput.getUuid() == null, + () -> new InvalidEntityException("No UUID assigned", assetTypeInput))); + exceptions.add( + Try.ofVoid( + assetTypeInput.getId() == null, + () -> new InvalidEntityException("No ID assigned", assetTypeInput))); // Further checks for subclasses if (LineTypeInput.class.isAssignableFrom(assetTypeInput.getClass())) - ConnectorValidationUtils.checkLineType((LineTypeInput) assetTypeInput); + exceptions.addAll(ConnectorValidationUtils.checkLineType((LineTypeInput) assetTypeInput)); else if (Transformer2WTypeInput.class.isAssignableFrom(assetTypeInput.getClass())) - ConnectorValidationUtils.checkTransformer2WType((Transformer2WTypeInput) assetTypeInput); + exceptions.addAll( + ConnectorValidationUtils.checkTransformer2WType((Transformer2WTypeInput) assetTypeInput)); else if (Transformer3WTypeInput.class.isAssignableFrom(assetTypeInput.getClass())) - ConnectorValidationUtils.checkTransformer3WType((Transformer3WTypeInput) assetTypeInput); + exceptions.addAll( + ConnectorValidationUtils.checkTransformer3WType((Transformer3WTypeInput) assetTypeInput)); else if 
(SystemParticipantTypeInput.class.isAssignableFrom(assetTypeInput.getClass())) - SystemParticipantValidationUtils.checkType((SystemParticipantTypeInput) assetTypeInput); + exceptions.addAll( + SystemParticipantValidationUtils.checkType((SystemParticipantTypeInput) assetTypeInput)); else { - throw checkNotImplementedException(assetTypeInput); + exceptions.add( + new Failure<>( + new FailedValidationException( + buildNotImplementedException(assetTypeInput).getMessage()))); } + + return exceptions; + } + + /** + * Checks the validity of the ids for a given set of {@link AssetInput}. + * + * @param inputs a set of asset inputs + * @return a list of try objects either containing an {@link UnsafeEntityException} or an empty + * Success + */ + protected static List> checkIds( + Set inputs) { + List ids = new ArrayList<>(); + List> exceptions = new ArrayList<>(); + + inputs.forEach( + input -> { + String id = input.getId(); + if (!ids.contains(id)) { + ids.add(id); + } else { + exceptions.add( + new Failure<>( + new UnsafeEntityException( + "There is already an entity with the id " + id, input))); + } + }); + + return exceptions; } /** - * Checks, if the given object is null. If so, an {@link InvalidEntityException} is thrown. + * Checks, if the given object is null. If so, an {@link InvalidEntityException} wrapped in a + * {@link Failure} is returned. * * @param obj Object to check * @param expectedDescription Further description, of what has been expected. + * @return either an {@link InvalidEntityException} wrapped in a {@link Failure} or an empty + * {@link Success} */ - protected static void checkNonNull(Object obj, String expectedDescription) { - if (obj == null) - throw new InvalidEntityException( - "Expected " + expectedDescription + ", but got nothing. 
:-(", new NullPointerException()); + protected static Try checkNonNull( + Object obj, String expectedDescription) { + return Try.ofVoid( + obj == null, + () -> + new InvalidEntityException( + "Validation not possible because received object was null. Expected " + + expectedDescription + + ", but got nothing. :-(", + new NullPointerException())); } /** @@ -167,7 +272,8 @@ protected static void checkNonNull(Object obj, String expectedDescription) { * @param quantities Array of quantities to check * @param entity Unique entity holding the malformed quantities */ - protected static void detectNegativeQuantities(Quantity[] quantities, UniqueEntity entity) { + protected static void detectNegativeQuantities(Quantity[] quantities, UniqueEntity entity) + throws InvalidEntityException { Predicate> predicate = quantity -> quantity.getValue().doubleValue() < 0d; detectMalformedQuantities( quantities, entity, predicate, "The following quantities have to be zero or positive"); @@ -181,7 +287,7 @@ protected static void detectNegativeQuantities(Quantity[] quantities, UniqueE * @param entity Unique entity holding the malformed quantities */ protected static void detectZeroOrNegativeQuantities( - Quantity[] quantities, UniqueEntity entity) { + Quantity[] quantities, UniqueEntity entity) throws InvalidEntityException { Predicate> predicate = quantity -> quantity.getValue().doubleValue() <= 0d; detectMalformedQuantities( quantities, entity, predicate, "The following quantities have to be positive"); @@ -192,7 +298,8 @@ protected static void detectZeroOrNegativeQuantities( * @param quantities Array of quantities to check * @param entity Unique entity holding the malformed quantities */ - protected static void detectPositiveQuantities(Quantity[] quantities, UniqueEntity entity) { + protected static void detectPositiveQuantities(Quantity[] quantities, UniqueEntity entity) + throws InvalidEntityException { Predicate> predicate = quantity -> quantity.getValue().doubleValue() > 0d; 
detectMalformedQuantities( quantities, entity, predicate, "The following quantities have to be negative"); @@ -208,7 +315,8 @@ protected static void detectPositiveQuantities(Quantity[] quantities, UniqueE * @param msg Message prefix to use for the exception message: [msg]: [malformedQuantities] */ protected static void detectMalformedQuantities( - Quantity[] quantities, UniqueEntity entity, Predicate> predicate, String msg) { + Quantity[] quantities, UniqueEntity entity, Predicate> predicate, String msg) + throws InvalidEntityException { String malformedQuantities = Arrays.stream(quantities) .filter(predicate) diff --git a/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy index 9c4ef02c3..3b8eede71 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy @@ -19,17 +19,14 @@ import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue import edu.ie3.datamodel.models.timeseries.repetitive.RepetitiveTimeSeries import edu.ie3.datamodel.models.value.EnergyPriceValue import edu.ie3.util.io.FileIOUtils -import org.apache.commons.io.FilenameUtils import spock.lang.Shared import spock.lang.Specification import tech.units.indriya.quantity.Quantities import java.nio.file.Files import java.nio.file.Path -import java.nio.file.Paths import java.time.ZonedDateTime import java.util.stream.Collectors -import java.util.stream.Stream class CsvFileConnectorTest extends Specification { @Shared @@ -39,26 +36,26 @@ class CsvFileConnectorTest extends Specification { CsvFileConnector cfc @Shared - Set timeSeriesPaths + Set timeSeriesPaths @Shared - Set pathsToIgnore + Set pathsToIgnore def setupSpec() { tmpDirectory = Files.createTempDirectory("psdm_csv_file_connector_") - cfc = new CsvFileConnector(tmpDirectory.toString(), new FileNamingStrategy()) - def gridPaths = 
["node_input.csv"] + cfc = new CsvFileConnector(tmpDirectory, new FileNamingStrategy()) + def gridPaths = [Path.of("node_input.csv")] timeSeriesPaths = [ "its_pq_53990eea-1b5d-47e8-9134-6d8de36604bf.csv", "its_p_fcf0b851-a836-4bde-8090-f44c382ed226.csv", "its_pqh_5022a70e-a58f-4bac-b8ec-1c62376c216b.csv", "its_c_b88dee50-5484-4136-901d-050d8c1c97d1.csv", "its_c_c7b0d9d6-5044-4f51-80b4-f221d8b1f14b.csv" - ] + ].stream().map { file -> Path.of(file) }.collect(Collectors.toSet()) pathsToIgnore = [ - "file_to_be_ignored.txt" + Path.of("file_to_be_ignored.txt") ] - (gridPaths + pathsToIgnore + timeSeriesPaths).forEach { it -> Files.createFile(Paths.get(FilenameUtils.concat(tmpDirectory.toString(), it))) } + (gridPaths + pathsToIgnore + timeSeriesPaths).forEach { path -> Files.createFile(tmpDirectory.resolve(path)) } } def cleanupSpec() { @@ -80,11 +77,11 @@ class CsvFileConnectorTest extends Specification { def "The csv file connector is able to build correct uuid to meta information mapping"() { given: def expected = [ - (UUID.fromString("53990eea-1b5d-47e8-9134-6d8de36604bf")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("53990eea-1b5d-47e8-9134-6d8de36604bf"), ColumnScheme.APPARENT_POWER, "its_pq_53990eea-1b5d-47e8-9134-6d8de36604bf"), - (UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226"), ColumnScheme.ACTIVE_POWER, "its_p_fcf0b851-a836-4bde-8090-f44c382ed226"), - (UUID.fromString("5022a70e-a58f-4bac-b8ec-1c62376c216b")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("5022a70e-a58f-4bac-b8ec-1c62376c216b"), ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND, "its_pqh_5022a70e-a58f-4bac-b8ec-1c62376c216b"), - (UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1"), ColumnScheme.ENERGY_PRICE, "its_c_b88dee50-5484-4136-901d-050d8c1c97d1"), - 
(UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), ColumnScheme.ENERGY_PRICE, "its_c_c7b0d9d6-5044-4f51-80b4-f221d8b1f14b") + (UUID.fromString("53990eea-1b5d-47e8-9134-6d8de36604bf")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("53990eea-1b5d-47e8-9134-6d8de36604bf"), ColumnScheme.APPARENT_POWER, Path.of("its_pq_53990eea-1b5d-47e8-9134-6d8de36604bf")), + (UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226"), ColumnScheme.ACTIVE_POWER, Path.of("its_p_fcf0b851-a836-4bde-8090-f44c382ed226")), + (UUID.fromString("5022a70e-a58f-4bac-b8ec-1c62376c216b")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("5022a70e-a58f-4bac-b8ec-1c62376c216b"), ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND, Path.of("its_pqh_5022a70e-a58f-4bac-b8ec-1c62376c216b")), + (UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1"), ColumnScheme.ENERGY_PRICE, Path.of("its_c_b88dee50-5484-4136-901d-050d8c1c97d1")), + (UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), ColumnScheme.ENERGY_PRICE, Path.of("its_c_c7b0d9d6-5044-4f51-80b4-f221d8b1f14b")) ] when: @@ -97,9 +94,9 @@ class CsvFileConnectorTest extends Specification { def "The csv file connector is able to build correct uuid to meta information mapping when restricting column schemes"() { given: def expected = [ - (UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1"), ColumnScheme.ENERGY_PRICE, "its_c_b88dee50-5484-4136-901d-050d8c1c97d1"), - (UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b")): new 
CsvIndividualTimeSeriesMetaInformation(UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), ColumnScheme.ENERGY_PRICE, "its_c_c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), - (UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226"), ColumnScheme.ACTIVE_POWER, "its_p_fcf0b851-a836-4bde-8090-f44c382ed226") + (UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1"), ColumnScheme.ENERGY_PRICE, Path.of("its_c_b88dee50-5484-4136-901d-050d8c1c97d1")), + (UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), ColumnScheme.ENERGY_PRICE, Path.of("its_c_c7b0d9d6-5044-4f51-80b4-f221d8b1f14b")), + (UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226"), ColumnScheme.ACTIVE_POWER, Path.of("its_p_fcf0b851-a836-4bde-8090-f44c382ed226")) ] when: @@ -114,7 +111,7 @@ class CsvFileConnectorTest extends Specification { def "The csv file connector throws an Exception, if the foreseen file cannot be found"() { given: - def cfc = new CsvFileConnector(tmpDirectory.toString(), new FileNamingStrategy(new EntityPersistenceNamingStrategy(), new DefaultDirectoryHierarchy(tmpDirectory.toString(), "test"))) + def cfc = new CsvFileConnector(tmpDirectory, new FileNamingStrategy(new EntityPersistenceNamingStrategy(), new DefaultDirectoryHierarchy(tmpDirectory, "test"))) when: cfc.initReader(NodeInput) @@ -133,13 +130,13 @@ class CsvFileConnectorTest extends Specification { def "The csv file connector is able to init writers utilizing a directory hierarchy"() { given: "a suitable connector" - def baseDirectory = FilenameUtils.concat(tmpDirectory.toString(), "directoryHierarchy") + def baseDirectory = 
tmpDirectory.resolve("directoryHierarchy") def directoryHierarchy = new DefaultDirectoryHierarchy(baseDirectory, "test") def fileNamingStrategy = new FileNamingStrategy(new EntityPersistenceNamingStrategy(), directoryHierarchy) def connector = new CsvFileConnector(baseDirectory, fileNamingStrategy) and: "expected results" - def nodeFile = new File(Stream.of(baseDirectory, "test", "input", "grid", "node_input.csv").collect(Collectors.joining(File.separator))) + def nodeFile = baseDirectory.resolve(Path.of("test", "input", "grid", "node_input.csv")).toFile() when: /* The head line is of no interest here */ @@ -153,12 +150,12 @@ class CsvFileConnectorTest extends Specification { def "The csv file connector is able to init writers utilizing no directory hierarchy"() { given: "a suitable connector" - def baseDirectory = FilenameUtils.concat(tmpDirectory.toString(), "directoryHierarchy") + def baseDirectory = tmpDirectory.resolve("directoryHierarchy") def fileNamingStrategy = new FileNamingStrategy() def connector = new CsvFileConnector(baseDirectory, fileNamingStrategy) and: "expected results" - def nodeFile = new File(FilenameUtils.concat(baseDirectory, "node_input.csv")) + def nodeFile = baseDirectory.resolve("node_input.csv").toFile() when: /* The head line is of no interest here */ @@ -172,9 +169,8 @@ class CsvFileConnectorTest extends Specification { def "The csv file connector throws ConnectorException if no csv file definition can be built from class information"() { given: - def baseDirectory = tmpDirectory.toString() def fileNamingStrategy = new FileNamingStrategy() - def connector = new CsvFileConnector(baseDirectory, fileNamingStrategy) + def connector = new CsvFileConnector(tmpDirectory, fileNamingStrategy) when: connector.buildFileDefinition(String, ["a", "b", "c"] as String[], ",") @@ -186,37 +182,42 @@ class CsvFileConnectorTest extends Specification { def "The csv file connector is able to build correct csv file definition from class upon request"() { 
given: - def baseDirectory = tmpDirectory.toString() def fileNamingStrategy = new FileNamingStrategy() - def connector = new CsvFileConnector(baseDirectory, fileNamingStrategy) - def expected = new CsvFileDefinition("node_input.csv", "", ["a", "b", "c"] as String[], ",") + def connector = new CsvFileConnector(tmpDirectory, fileNamingStrategy) + def expected = new CsvFileDefinition("node_input.csv", Path.of(""), ["a", "b", "c"] as String[], ",") when: def actual = connector.buildFileDefinition(NodeInput, ["a", "b", "c"] as String[], ",") then: - actual == expected + actual.with { + assert it.filePath == expected.filePath + assert it.headLineElements() == expected.headLineElements() + assert it.csvSep() == expected.csvSep() + } } def "The csv file connector is able to build correct csv file definition from class upon request, utilizing directory hierarchy"() { given: - def baseDirectory = tmpDirectory.toString() - def fileNamingStrategy = new FileNamingStrategy(new EntityPersistenceNamingStrategy(), new DefaultDirectoryHierarchy(tmpDirectory.toString(), "test")) - def connector = new CsvFileConnector(baseDirectory, fileNamingStrategy) - def expected = new CsvFileDefinition("node_input.csv", Stream.of("test", "input", "grid").collect(Collectors.joining(File.separator)), ["a", "b", "c"] as String[], ",") + def fileNamingStrategy = new FileNamingStrategy(new EntityPersistenceNamingStrategy(), new DefaultDirectoryHierarchy(tmpDirectory, "test")) + def connector = new CsvFileConnector(tmpDirectory, fileNamingStrategy) + def expected = new CsvFileDefinition("node_input.csv", Path.of("test", "input", "grid"), ["a", "b", "c"] as String[], ",") when: def actual = connector.buildFileDefinition(NodeInput, ["a", "b", "c"] as String[], ",") then: - actual == expected + actual.with { + assert it.filePath == expected.filePath + assert it.headLineElements() == expected.headLineElements() + assert it.csvSep() == expected.csvSep() + } } def "The csv file connector throws 
ConnectorException if no csv file definition can be built from time series"() { given: "a suitable connector" - def baseDirectory = tmpDirectory.toString() def fileNamingStrategy = new FileNamingStrategy() - def connector = new CsvFileConnector(baseDirectory, fileNamingStrategy) + def connector = new CsvFileConnector(tmpDirectory, fileNamingStrategy) and: "credible input" def timeSeries = Mock(RepetitiveTimeSeries) @@ -231,10 +232,9 @@ class CsvFileConnectorTest extends Specification { def "The csv file connector is able to build correct csv file definition from time series upon request"() { given: "a suitable connector" - def baseDirectory = tmpDirectory.toString() def fileNamingStrategy = new FileNamingStrategy() - def connector = new CsvFileConnector(baseDirectory, fileNamingStrategy) - def expected = new CsvFileDefinition("its_c_0c03ce9f-ab0e-4715-bc13-f9d903f26dbf.csv", "", ["a", "b", "c"] as String[], ",") + def connector = new CsvFileConnector(tmpDirectory, fileNamingStrategy) + def expected = new CsvFileDefinition("its_c_0c03ce9f-ab0e-4715-bc13-f9d903f26dbf.csv", Path.of(""), ["a", "b", "c"] as String[], ",") and: "credible input" def entries = [ @@ -248,15 +248,18 @@ class CsvFileConnectorTest extends Specification { def actual = connector.buildFileDefinition(timeSeries, ["a", "b", "c"] as String[], ",") then: - actual == expected + actual.with { + assert it.filePath == expected.filePath + assert it.headLineElements() == expected.headLineElements() + assert it.csvSep() == expected.csvSep() + } } def "The csv file connector is able to build correct csv file definition from time series upon request, utilizing directory hierarchy"() { given: "a suitable connector" - def baseDirectory = tmpDirectory.toString() - def fileNamingStrategy = new FileNamingStrategy(new EntityPersistenceNamingStrategy(), new DefaultDirectoryHierarchy(tmpDirectory.toString(), "test")) - def connector = new CsvFileConnector(baseDirectory, fileNamingStrategy) - def expected = new 
CsvFileDefinition("its_c_0c03ce9f-ab0e-4715-bc13-f9d903f26dbf.csv", Stream.of("test", "input", "participants", "time_series").collect(Collectors.joining(File.separator)), ["a", "b", "c"] as String[], ",") + def fileNamingStrategy = new FileNamingStrategy(new EntityPersistenceNamingStrategy(), new DefaultDirectoryHierarchy(tmpDirectory, "test")) + def connector = new CsvFileConnector(tmpDirectory, fileNamingStrategy) + def expected = new CsvFileDefinition("its_c_0c03ce9f-ab0e-4715-bc13-f9d903f26dbf.csv", Path.of("test", "input", "participants", "time_series"), ["a", "b", "c"] as String[], ",") and: "credible input" def entries = [ @@ -270,15 +273,19 @@ class CsvFileConnectorTest extends Specification { def actual = connector.buildFileDefinition(timeSeries, ["a", "b", "c"] as String[], ",") then: - actual == expected + actual.with { + assert it.filePath == expected.filePath + assert it.headLineElements() == expected.headLineElements() + assert it.csvSep() == expected.csvSep() + } } def "Initialising a writer with incorrect base directory leads to ConnectorException"() { given: - def baseFolder = FilenameUtils.concat(tmpDirectory.toString(), "helloWorld.txt") - def baseFolderFile = new File(baseFolder) + def baseFolder = tmpDirectory.resolve("helloWorld.txt") + def baseFolderFile = baseFolder.toFile() baseFolderFile.createNewFile() - def fileDefinition = new CsvFileDefinition("test.csv", "", [] as String[], ",") + def fileDefinition = new CsvFileDefinition("test.csv", Path.of(""), [] as String[], ",") when: cfc.initWriter(baseFolder, fileDefinition) diff --git a/src/test/groovy/edu/ie3/datamodel/io/csv/BufferedCsvWriterTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/csv/BufferedCsvWriterTest.groovy index 97eb15071..a48264940 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/csv/BufferedCsvWriterTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/csv/BufferedCsvWriterTest.groovy @@ -29,9 +29,9 @@ class BufferedCsvWriterTest extends Specification { def "The 
convenience constructor of the BufferedCsvWriter class works as expected."() { given: - def baseDirectory = tmpDirectory.toString() - def fileDefinition = new CsvFileDefinition("test.csv", "", ["a", "b", "c"] as String[], ",") - def expectedFile = new File(FilenameUtils.concat(tmpDirectory.toString(), fileDefinition.filePath)) + def baseDirectory = tmpDirectory + def fileDefinition = new CsvFileDefinition("test.csv", Path.of(""), ["a", "b", "c"] as String[], ",") + def expectedFile = tmpDirectory.resolve(fileDefinition.filePath).toFile() when: def actual = new BufferedCsvWriter(baseDirectory, fileDefinition, false) @@ -47,7 +47,7 @@ class BufferedCsvWriterTest extends Specification { def "The buffered csv writer refuses to write entries, if their length does not conform the needed length of head line elements"() { given: - def targetFile = FilenameUtils.concat(tmpDirectory.toString(), "test.csv") + def targetFile = tmpDirectory.resolve("test.csv") def writer = new BufferedCsvWriter(targetFile, ["a", "b", "c"] as String[], "c,", false) def malFormedInput = [ "a": "z", @@ -64,7 +64,7 @@ class BufferedCsvWriterTest extends Specification { def "The buffered csv writer refuses to write entries, if keys do not match the required head line"() { given: - def targetFile = FilenameUtils.concat(tmpDirectory.toString(), "test.csv") + def targetFile = tmpDirectory.resolve("test.csv") def writer = new BufferedCsvWriter(targetFile, ["a", "b", "c"] as String[], "c,", false) def malFormedInput = [ "a": "z", @@ -82,7 +82,7 @@ class BufferedCsvWriterTest extends Specification { def "The buffered csv writer writes out content in the order specified by the headline elements"() { given: - def targetFile = FilenameUtils.concat(tmpDirectory.toString(), "order_test.csv") + def targetFile = tmpDirectory.resolve("order_test.csv") def writer = new BufferedCsvWriter(targetFile, ["third_header", "second_header", "first_header"] as String[], ",", false) writer.writeFileHeader() def content = [ 
@@ -97,7 +97,7 @@ class BufferedCsvWriterTest extends Specification { /* Read in the content */ def writtenContent = "" def headline = "" - try(BufferedReader reader = new BufferedReader(new FileReader(targetFile))) { + try(BufferedReader reader = new BufferedReader(new FileReader(targetFile.toFile()))) { headline = reader.readLine() writtenContent = reader.readLine() } catch (Exception e) { diff --git a/src/test/groovy/edu/ie3/datamodel/io/csv/CsvFileDefinitionTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/csv/CsvFileDefinitionTest.groovy index 054388dad..747d64120 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/csv/CsvFileDefinitionTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/csv/CsvFileDefinitionTest.groovy @@ -9,6 +9,8 @@ import org.apache.commons.io.FilenameUtils import spock.lang.Shared import spock.lang.Specification +import java.nio.file.Path + class CsvFileDefinitionTest extends Specification { @Shared String[] headLineElements @@ -20,45 +22,13 @@ class CsvFileDefinitionTest extends Specification { String fileName @Shared - String directory + Path directory def setupSpec() { headLineElements = ["a", "b", "c"] as String[] csvSep = "," fileName = "node_input.csv" - directory = FilenameUtils.concat("test", "grid") - } - - def "A csv file definition is set up correctly, if the directory path has corrupt file separator"() { - when: - def actual = new CsvFileDefinition(fileName, manipulatedDirectory, headLineElements, csvSep) - - then: - actual.with { - assert it.fileName() == this.fileName - assert it.directoryPath() == this.directory - assert it.headLineElements() == this.headLineElements - assert it.csvSep() == this.csvSep - } - - where: - manipulatedDirectory || expected - "/" + this.directory || this.directory - this.directory + "/" || this.directory - this.directory.replaceAll("[\\\\/]", File.separator == "/" ? 
"\\\\" : "/") || this.directory - } - - def "A csv file definition is set up correctly, if the directory path is null"() { - when: - def actual = new CsvFileDefinition(fileName, null, headLineElements, csvSep) - - then: - actual.with { - assert it.fileName() == this.fileName - assert it.directoryPath() == "" - assert it.headLineElements() == this.headLineElements - assert it.csvSep() == this.csvSep - } + directory = Path.of("test", "grid") } def "A csv file definition throw IllegalArgumentException, if the file name is malformed"() { @@ -82,8 +52,8 @@ class CsvFileDefinitionTest extends Specification { then: actual.with { - assert it.fileName() == this.fileName - assert it.directoryPath() == this.directory + assert it.filePath.fileName == Path.of(this.fileName) + assert it.directoryPath == this.directory assert it.headLineElements() == this.headLineElements assert it.csvSep() == this.csvSep } @@ -98,26 +68,10 @@ class CsvFileDefinitionTest extends Specification { then: actual.with { - assert it.fileName() == this.fileName - assert it.directoryPath() == directory + assert it.filePath.fileName == Path.of(this.fileName) + assert it.directoryPath == this.directory assert it.headLineElements() == this.headLineElements assert it.csvSep() == this.csvSep } } - - def "A csv file definition returns correct file path"() { - given: - def definition = new CsvFileDefinition(fileName, manipulatedDirectory, headLineElements, csvSep) - - when: - def actual = definition.filePath - - then: - actual == expected - - where: - manipulatedDirectory || expected - "" || this.fileName - FilenameUtils.concat("test", "grid") || FilenameUtils.concat(FilenameUtils.concat("test", "grid"), this.fileName) - } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/csv/GridIoIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/csv/GridIoIT.groovy deleted file mode 100644 index 4c5a195e0..000000000 --- a/src/test/groovy/edu/ie3/datamodel/io/csv/GridIoIT.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * © 2022. 
TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation - */ -package edu.ie3.datamodel.io.csv - -import edu.ie3.datamodel.io.sink.CsvFileSink -import edu.ie3.datamodel.io.source.csv.CsvJointGridContainerSource -import edu.ie3.datamodel.io.source.csv.CsvTestDataMeta -import edu.ie3.util.io.FileIOUtils -import spock.lang.Shared -import spock.lang.Specification - -import java.nio.file.Files -import java.nio.file.Path - -/** - * Testing whether PSDM CSV grids are equal when serialized and deserialized sequentially. - * Grid data should not change when written out or parsed. - */ -class GridIoIT extends Specification implements CsvTestDataMeta { - - @Shared - Path tempDirectory - - @Shared - CsvFileSink sink - - def setupSpec() { - tempDirectory = Files.createTempDirectory("GridIoIT") - sink = new CsvFileSink(tempDirectory.toAbsolutePath().toString()) - } - - def cleanupSpec() { - sink.shutdown() - FileIOUtils.deleteRecursively(tempDirectory) - } - - def "Input JointGridContainer equals Output JointGridContainer."() { - - given: - // create joint grid container - def gridName = "vn_simona" - def separator = "," - def firstGridContainer = CsvJointGridContainerSource.read(gridName, separator, jointGridFolderPath) - - when: - // write files from joint grid container in output directory - sink.persistJointGrid(firstGridContainer) - - // create second grid container from output folder - def secondGridContainer = CsvJointGridContainerSource.read(gridName, separator, tempDirectory.toAbsolutePath().toString()) - - then: - // compare input and output joint grid container - firstGridContainer == secondGridContainer - - } -} diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/graphics/LineGraphicInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/graphics/LineGraphicInputFactoryTest.groovy index 7f5f21ef5..047237f7b 100644 --- 
a/src/test/groovy/edu/ie3/datamodel/io/factory/graphics/LineGraphicInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/graphics/LineGraphicInputFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.graphics +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputEntityData import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputFactory import edu.ie3.datamodel.models.input.connector.LineInput import edu.ie3.datamodel.models.input.graphics.LineGraphicInput import edu.ie3.datamodel.utils.GridAndGeoUtils +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import org.locationtech.jts.geom.LineString import spock.lang.Specification @@ -38,13 +40,13 @@ class LineGraphicInputFactoryTest extends Specification implements FactoryTestHe def lineInput = Mock(LineInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new LineGraphicInputEntityData(parameter, lineInput)) then: - input.present - input.get().getClass() == inputClass - ((LineGraphicInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert path == getGeometry(parameter["path"]) assert graphicLayer == parameter["graphiclayer"] @@ -64,13 +66,13 @@ class LineGraphicInputFactoryTest extends Specification implements FactoryTestHe def lineInput = Mock(LineInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new LineGraphicInputEntityData(parameter, lineInput)) then: - input.present - input.get().getClass() == inputClass - ((LineGraphicInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert path == GridAndGeoUtils.buildSafeLineString(getGeometry(parameter["path"]) as LineString) } diff --git 
a/src/test/groovy/edu/ie3/datamodel/io/factory/graphics/NodeGraphicInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/graphics/NodeGraphicInputFactoryTest.groovy index c98f856ae..b6aac3179 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/graphics/NodeGraphicInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/graphics/NodeGraphicInputFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.graphics +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputEntityData import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputFactory import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput import edu.ie3.datamodel.utils.GridAndGeoUtils +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import org.locationtech.jts.geom.LineString import spock.lang.Specification @@ -39,13 +41,13 @@ class NodeGraphicInputFactoryTest extends Specification implements FactoryTestHe def nodeInput = Mock(NodeInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeGraphicInputEntityData(parameter, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((NodeGraphicInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert point == getGeometry(parameter["point"]) assert path == getGeometry(parameter["path"]) @@ -68,13 +70,13 @@ class NodeGraphicInputFactoryTest extends Specification implements FactoryTestHe def nodeInput = Mock(NodeInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeGraphicInputEntityData(parameter, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((NodeGraphicInput) input.get()).with { + input.success + 
input.data.get().getClass() == inputClass + input.data.get().with { assert path == GridAndGeoUtils.buildSafeLineString(getGeometry(parameter["path"]) as LineString) } where: diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactoryTest.groovy index b6f5b3d7f..424ad10ec 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactoryTest.groovy @@ -9,6 +9,7 @@ import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.input.AssetInput import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import org.apache.commons.lang3.NotImplementedException import spock.lang.Specification @@ -40,12 +41,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) then: - input.present - input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime == OperationTime.notLimited() assert operator == operatorInput @@ -66,12 +67,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) then: - input.present - 
input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime == OperationTime.notLimited() assert operator == operatorInput @@ -99,12 +100,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) then: - input.present - input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -126,12 +127,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) then: - input.present - input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert !operationTime.startDate.present assert operationTime.endDate.present @@ -154,12 +155,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, 
operatorInput)) then: - input.present - input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -180,12 +181,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def inputClass = TestAssetInput when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass)) then: - input.present - input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime == OperationTime.notLimited() assert operator == OperatorInput.NO_OPERATOR_ASSIGNED @@ -204,12 +205,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def inputClass = TestAssetInput when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass)) then: - input.present - input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -230,12 +231,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def inputClass = TestAssetInput when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, 
inputClass)) then: - input.present - input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert !operationTime.startDate.present assert operationTime.endDate.present @@ -257,12 +258,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def inputClass = TestAssetInput when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass)) then: - input.present - input.get().getClass() == inputClass - ((TestAssetInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -284,11 +285,11 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe def inputClass = TestAssetInput when: - inputFactory.get(new AssetInputEntityData(parameter, inputClass)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass)) then: - FactoryException ex = thrown() - ex.message == + input.failure + input.exception.get().cause.message == "The provided fields [operatesfrom, operatesuntil, uuid] with data \n" + "{operatesfrom -> 2019-01-01T00:00:00+01:00[Europe/Berlin],\n" + "operatesuntil -> 2019-12-31T00:00:00+01:00[Europe/Berlin],\n" + @@ -306,7 +307,7 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe } @Override - UniqueEntityBuilder copy() { + AssetInputCopyBuilder copy() { throw new NotImplementedException( "Copying of " + this.getClass().simpleName + " entities is not supported yet!") } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactoryTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactoryTest.groovy index e7d5caa21..cf75e1188 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.input +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput import edu.ie3.datamodel.models.input.thermal.ThermalBusInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -39,12 +41,12 @@ class CylindricalStorageInputFactoryTest extends Specification implements Facto def thermalBusInput = Mock(ThermalBusInput) when: - Optional input = inputFactory.get(new ThermalUnitInputEntityData(parameter, inputClass, thermalBusInput)) + Try input = inputFactory.get(new ThermalUnitInputEntityData(parameter, inputClass, thermalBusInput)) then: - input.present - input.get().getClass() == inputClass - ((CylindricalStorageInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime == OperationTime.notLimited() assert operator == OperatorInput.NO_OPERATOR_ASSIGNED diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy index de99b678a..067353976 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input +import 
edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput @@ -12,6 +13,7 @@ import edu.ie3.datamodel.models.input.connector.LineInput import edu.ie3.datamodel.models.input.connector.type.LineTypeInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import edu.ie3.datamodel.utils.GridAndGeoUtils +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import org.locationtech.jts.geom.LineString import spock.lang.Specification @@ -56,12 +58,12 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { def typeInput = Mock(LineTypeInput) when: - Optional input = inputFactory.get(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) + Try input = inputFactory.get(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((LineInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -107,12 +109,12 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { def typeInput = Mock(LineTypeInput) when: - Optional input = inputFactory.get(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) + Try input = inputFactory.get(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((LineInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + 
input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -158,12 +160,12 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { def typeInput = Mock(LineTypeInput) when: - Optional input = inputFactory.get(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) + Try input = inputFactory.get(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((LineInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert geoPosition == GridAndGeoUtils.buildSafeLineString(getGeometry(parameter["geoposition"]) as LineString) } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactoryTest.groovy index 8ace08144..d1c669d2c 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactoryTest.groovy @@ -5,11 +5,12 @@ */ package edu.ie3.datamodel.io.factory.input - +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.input.MeasurementUnitInput import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -38,12 +39,12 @@ class MeasurementUnitInputFactoryTest extends Specification implements FactoryTe def nodeInput = Mock(NodeInput) when: - Optional input = inputFactory.get(new NodeAssetInputEntityData(parameter, 
inputClass, nodeInput)) + Try input = inputFactory.get(new NodeAssetInputEntityData(parameter, inputClass, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((MeasurementUnitInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + ((MeasurementUnitInput) input.data.get()).with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime == OperationTime.notLimited() assert operator == OperatorInput.NO_OPERATOR_ASSIGNED diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy index 65b6d053e..f9f28cafe 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy @@ -5,10 +5,12 @@ */ package edu.ie3.datamodel.io.factory.input +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.ComparableQuantity @@ -45,12 +47,12 @@ class NodeInputFactoryTest extends Specification implements FactoryTestHelper { def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) then: - input.present - input.get().getClass() == inputClass - ((NodeInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + ((NodeInput) input.data.get()).with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert 
operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/OperatorInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/OperatorInputFactoryTest.groovy index 1cd317aba..9a85edab5 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/OperatorInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/OperatorInputFactoryTest.groovy @@ -5,8 +5,10 @@ */ package edu.ie3.datamodel.io.factory.input +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.utils.Try import spock.lang.Specification class OperatorInputFactoryTest extends Specification { @@ -31,12 +33,12 @@ class OperatorInputFactoryTest extends Specification { def inputClass = OperatorInput when: - Optional input = inputFactory.get(new SimpleEntityData(parameter, inputClass)) + Try input = inputFactory.get(new SimpleEntityData(parameter, inputClass)) then: - input.present - input.get().getClass() == inputClass - ((OperatorInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/SwitchInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/SwitchInputFactoryTest.groovy index 795f96e12..9c862e663 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/SwitchInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/SwitchInputFactoryTest.groovy @@ -5,9 +5,11 @@ */ package edu.ie3.datamodel.io.factory.input +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import 
edu.ie3.datamodel.models.input.connector.SwitchInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -39,12 +41,12 @@ class SwitchInputFactoryTest extends Specification implements FactoryTestHelper def nodeInputB = Mock(NodeInput) when: - Optional input = inputFactory.get(new ConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB)) + Try input = inputFactory.get(new ConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB)) then: - input.present - input.get().getClass() == inputClass - ((SwitchInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalBusInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalBusInputFactoryTest.groovy index 0b7e17478..9ef17d5d4 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalBusInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalBusInputFactoryTest.groovy @@ -5,8 +5,10 @@ */ package edu.ie3.datamodel.io.factory.input +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.thermal.ThermalBusInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -35,12 +37,12 @@ class ThermalBusInputFactoryTest extends Specification implements FactoryTestHel def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, operatorInput)) + Try input = inputFactory.get(new AssetInputEntityData(parameter, inputClass, 
operatorInput)) then: - input.present - input.get().getClass() == inputClass - ((ThermalBusInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactoryTest.groovy index 40b875d29..82c0e7843 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.input +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.thermal.ThermalBusInput import edu.ie3.datamodel.models.input.thermal.ThermalHouseInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -39,12 +41,12 @@ class ThermalHouseInputFactoryTest extends Specification implements FactoryTestH def thermalBusInput = Mock(ThermalBusInput) when: - Optional input = inputFactory.get(new ThermalUnitInputEntityData(parameter, inputClass, thermalBusInput)) + Try input = inputFactory.get(new ThermalUnitInputEntityData(parameter, inputClass, thermalBusInput)) then: - input.present - input.get().getClass() == inputClass - ((ThermalHouseInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime == OperationTime.notLimited() assert operator == OperatorInput.NO_OPERATOR_ASSIGNED 
diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactoryTest.groovy index 2d085cd92..260f1bedd 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactoryTest.groovy @@ -5,10 +5,12 @@ */ package edu.ie3.datamodel.io.factory.input +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.connector.Transformer2WInput import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -43,12 +45,12 @@ class Transformer2WInputFactoryTest extends Specification implements FactoryTest def typeInput = Mock(Transformer2WTypeInput) when: - Optional input = inputFactory.get(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) + Try input = inputFactory.get(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((Transformer2WInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer3WInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer3WInputFactoryTest.groovy index 051448604..5a1d7b9c0 100644 --- 
a/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer3WInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer3WInputFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.input +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.connector.Transformer3WInput import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -40,12 +42,12 @@ class Transformer3WInputFactoryTest extends Specification implements FactoryTes def typeInput = Mock(Transformer3WTypeInput) when: - Optional input = inputFactory.get(new Transformer3WInputEntityData(parameter, inputClass, nodeInputA, nodeInputB, nodeInputC, typeInput)) + Try input = inputFactory.get(new Transformer3WInputEntityData(parameter, inputClass, nodeInputA, nodeInputB, nodeInputC, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((Transformer3WInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime == OperationTime.notLimited() assert operator == OperatorInput.NO_OPERATOR_ASSIGNED diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/BmInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/BmInputFactoryTest.groovy index 7940b9bdb..04d9e40b7 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/BmInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/BmInputFactoryTest.groovy @@ -5,12 +5,14 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import 
edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.BmInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import edu.ie3.datamodel.models.input.system.type.BmTypeInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -49,13 +51,13 @@ class BmInputFactoryTest extends Specification implements FactoryTestHelper { def typeInput = Mock(BmTypeInput) when: - Optional input = inputFactory.get( - new SystemParticipantTypedEntityData(parameter, inputClass,operatorInput, nodeInput, typeInput)) + Try input = inputFactory.get( + new SystemParticipantTypedEntityData(parameter, inputClass, operatorInput, nodeInput, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((BmInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/ChpInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/ChpInputFactoryTest.groovy index 0de257f6e..373da8d5b 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/ChpInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/ChpInputFactoryTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import 
edu.ie3.datamodel.models.input.system.ChpInput @@ -12,6 +13,7 @@ import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import edu.ie3.datamodel.models.input.system.type.ChpTypeInput import edu.ie3.datamodel.models.input.thermal.ThermalBusInput import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -50,13 +52,13 @@ class ChpInputFactoryTest extends Specification implements FactoryTestHelper { def thermalStorageInput = Mock(ThermalStorageInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new ChpInputEntityData(parameter, operatorInput, nodeInput, typeInput, thermalBusInput, thermalStorageInput)) then: - input.present - input.get().getClass() == inputClass - ((ChpInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EmInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EmInputFactoryTest.groovy index 40ed1d634..ba8ae0d84 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EmInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EmInputFactoryTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData import edu.ie3.datamodel.models.ControlStrategy import edu.ie3.datamodel.models.EmControlStrategy @@ -12,6 +13,7 @@ import edu.ie3.datamodel.models.input.NodeInput import 
edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.EmInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint +import edu.ie3.datamodel.utils.Try import edu.ie3.util.quantities.PowerSystemUnits import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -47,13 +49,13 @@ class EmInputFactoryTest extends Specification { def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((EmInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -92,13 +94,13 @@ class EmInputFactoryTest extends Specification { def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((EmInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -132,13 +134,13 @@ class EmInputFactoryTest extends Specification { def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((EmInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + 
input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.empty assert operationTime.endDate.empty diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvInputFactoryTest.groovy index 3e6110395..2d8b5c0d0 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvInputFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.EvInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import edu.ie3.datamodel.models.input.system.type.EvTypeInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -45,13 +47,13 @@ class EvInputFactoryTest extends Specification implements FactoryTestHelper { def typeInput = Mock(EvTypeInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new SystemParticipantTypedEntityData(parameter, inputClass, operatorInput, nodeInput, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((EvInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvcsInputFactoryTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvcsInputFactoryTest.groovy index d01e29071..4f3412e23 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvcsInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvcsInputFactoryTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput @@ -12,6 +13,7 @@ import edu.ie3.datamodel.models.input.system.EvcsInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import edu.ie3.datamodel.models.input.system.type.chargingpoint.ChargingPointTypeUtils import edu.ie3.datamodel.models.input.system.type.evcslocation.EvcsLocationType +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import edu.ie3.util.quantities.PowerSystemUnits import spock.lang.Specification @@ -55,13 +57,13 @@ class EvcsInputFactoryTest extends Specification implements FactoryTestHelper { def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((EvcsInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -104,12 +106,12 @@ class EvcsInputFactoryTest extends Specification implements FactoryTestHelper { def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new 
NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: - // FactoryException is caught in Factory.java. We get an empty Option back - !input.present + input.failure + input.exception.get().cause.message == "Exception while trying to parse field \"type\" with supposed int value \"-- invalid --\"" } def "A EvcsInputFactory should fail when passing an invalid EvcsLocationType"() { @@ -132,11 +134,11 @@ class EvcsInputFactoryTest extends Specification implements FactoryTestHelper { def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: - // FactoryException is caught in Factory.java. We get an empty Option back - !input.present + input.failure + input.exception.get().cause.message == "Exception while trying to parse field \"locationtype\" with supposed int value \"-- invalid --\"" } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactoryTest.groovy index 68aaaff58..fa7184b4b 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactoryTest.groovy @@ -12,6 +12,7 @@ import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.FixedFeedInInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -48,12 +49,12 @@ class FixedFeedInInputFactoryTest extends Specification implements FactoryTestHe def operatorInput = Mock(OperatorInput) when: - Optional input 
= inputFactory.get(new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) + Try input = inputFactory.get(new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((FixedFeedInInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) @@ -85,11 +86,11 @@ class FixedFeedInInputFactoryTest extends Specification implements FactoryTestHe def nodeInput = Mock(NodeInput) when: - inputFactory.get(new NodeAssetInputEntityData(parameter, inputClass, nodeInput)) + Try input = inputFactory.get(new NodeAssetInputEntityData(parameter, inputClass, nodeInput)) then: - FactoryException ex = thrown() - ex.message == "The provided fields [cosphirated, id, srated, uuid] with data \n" + + input.failure + input.exception.get().cause.message == "The provided fields [cosphirated, id, srated, uuid] with data \n" + "{cosphirated -> 4,\n" + "id -> TestID,\n" + "srated -> 3,\n" + diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/HpInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/HpInputFactoryTest.groovy index 60b7d52fe..47316ab03 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/HpInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/HpInputFactoryTest.groovy @@ -5,12 +5,14 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.HpInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import 
edu.ie3.datamodel.models.input.system.type.HpTypeInput import edu.ie3.datamodel.models.input.thermal.ThermalBusInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -47,13 +49,13 @@ class HpInputFactoryTest extends Specification implements FactoryTestHelper { def thermalBusInput = Mock(ThermalBusInput) when: - Optional input = inputFactory.get( - new HpInputEntityData(parameter,operatorInput, nodeInput, typeInput, thermalBusInput)) + Try input = inputFactory.get( + new HpInputEntityData(parameter, operatorInput, nodeInput, typeInput, thermalBusInput)) then: - input.present - input.get().getClass() == inputClass - ((HpInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactoryTest.groovy index 042f457ee..cede6ac6b 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactoryTest.groovy @@ -5,15 +5,17 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData -import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.LoadInput import 
edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint +import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile import edu.ie3.datamodel.models.profile.NbwTemperatureDependantLoadProfile +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -49,13 +51,13 @@ class LoadInputFactoryTest extends Specification implements FactoryTestHelper { "srated" : "4", "cosphirated" : "5" ] - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeAssetInputEntityData(parameter, inputClass, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((LoadInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime == OperationTime.notLimited() assert operator == OperatorInput.NO_OPERATOR_ASSIGNED diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/PvInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/PvInputFactoryTest.groovy index 425fd3592..4fffbba20 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/PvInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/PvInputFactoryTest.groovy @@ -5,12 +5,14 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.PvInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import 
tech.units.indriya.quantity.Quantities @@ -54,13 +56,13 @@ class PvInputFactoryTest extends Specification implements FactoryTestHelper { def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: - input.present - input.get().getClass() == inputClass - ((PvInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/StorageInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/StorageInputFactoryTest.groovy index 71e261663..da40b333d 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/StorageInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/StorageInputFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.StorageInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import edu.ie3.datamodel.models.input.system.type.StorageTypeInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -45,13 +47,13 @@ class StorageInputFactoryTest extends Specification implements FactoryTestHelper def typeInput = Mock(StorageTypeInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new SystemParticipantTypedEntityData(parameter, inputClass, 
operatorInput, nodeInput, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((StorageInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert operationTime.startDate.present assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/WecInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/WecInputFactoryTest.groovy index 5c4fc5f17..cd432e1f5 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/WecInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/WecInputFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.WecInput import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import edu.ie3.datamodel.models.input.system.type.WecTypeInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -46,13 +48,13 @@ class WecInputFactoryTest extends Specification implements FactoryTestHelper { def typeInput = Mock(WecTypeInput) when: - Optional input = inputFactory.get( + Try input = inputFactory.get( new SystemParticipantTypedEntityData(parameter, inputClass, operatorInput, nodeInput, typeInput)) then: - input.present - input.get().getClass() == inputClass - ((WecInput) input.get()).with { + input.success + input.data.get().getClass() == inputClass + input.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert !operationTime.startDate.present assert 
operationTime.endDate.present diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/result/ConnectorResultFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/result/ConnectorResultFactoryTest.groovy index 985c208af..d22c8abac 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/result/ConnectorResultFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/result/ConnectorResultFactoryTest.groovy @@ -5,12 +5,14 @@ */ package edu.ie3.datamodel.io.factory.result +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.result.connector.ConnectorResult import edu.ie3.datamodel.models.result.connector.LineResult import edu.ie3.datamodel.models.result.connector.Transformer2WResult import edu.ie3.datamodel.models.result.connector.Transformer3WResult +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -51,12 +53,12 @@ class ConnectorResultFactoryTest extends Specification implements FactoryTestHel } when: - Optional result = resultFactory.get(new SimpleEntityData(parameter, modelClass)) + Try result = resultFactory.get(new SimpleEntityData(parameter, modelClass)) then: - result.present - result.get().getClass() == resultingModelClass - ((ConnectorResult) result.get()).with { + result.success + result.data.get().getClass() == resultingModelClass + ((ConnectorResult) result.data.get()).with { assert time == TIME_UTIL.toZonedDateTime(parameter["time"]) assert inputModel == UUID.fromString(parameter["inputModel"]) assert iAAng == getQuant(parameter["iaang"], StandardUnits.ELECTRIC_CURRENT_ANGLE) @@ -65,12 +67,12 @@ class ConnectorResultFactoryTest extends Specification implements FactoryTestHel assert iBMag == getQuant(parameter["ibmag"], StandardUnits.ELECTRIC_CURRENT_MAGNITUDE) } - if (result.get().getClass() == Transformer2WResult) { - assert 
((Transformer2WResult) result.get()).tapPos == Integer.parseInt(parameter["tappos"]) + if (result.data.get().getClass() == Transformer2WResult) { + assert ((Transformer2WResult) result.data.get()).tapPos == Integer.parseInt(parameter["tappos"]) } - if (result.get().getClass() == Transformer3WResult) { - Transformer3WResult transformer3WResult = ((Transformer3WResult) result.get()) + if (result.data.get().getClass() == Transformer3WResult) { + Transformer3WResult transformer3WResult = ((Transformer3WResult) result.data.get()) assert transformer3WResult.tapPos == Integer.parseInt(parameter["tappos"]) assert transformer3WResult.iCAng == getQuant(parameter["icang"], StandardUnits.ELECTRIC_CURRENT_ANGLE) assert transformer3WResult.iCMag == getQuant(parameter["icmag"], StandardUnits.ELECTRIC_CURRENT_MAGNITUDE) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/result/FlexOptionsResultFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/result/FlexOptionsResultFactoryTest.groovy index 685632855..c46eabaea 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/result/FlexOptionsResultFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/result/FlexOptionsResultFactoryTest.groovy @@ -9,6 +9,7 @@ import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.result.system.FlexOptionsResult +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -35,12 +36,12 @@ class FlexOptionsResultFactoryTest extends Specification implements FactoryTestH ] when: - Optional result = resultFactory.get(new SimpleEntityData(parameter, FlexOptionsResult)) + Try result = resultFactory.get(new SimpleEntityData(parameter, FlexOptionsResult)) then: - result.present - result.get().getClass() == FlexOptionsResult - ((FlexOptionsResult) result.get()).with { + result.success + 
result.data.get().getClass() == FlexOptionsResult + ((FlexOptionsResult) result.data.get()).with { assert pRef == getQuant(parameter["pref"], StandardUnits.ACTIVE_POWER_RESULT) assert pMin == getQuant(parameter["pmin"], StandardUnits.ACTIVE_POWER_RESULT) assert pMax == getQuant(parameter["pmax"], StandardUnits.ACTIVE_POWER_RESULT) @@ -60,11 +61,11 @@ class FlexOptionsResultFactoryTest extends Specification implements FactoryTestH ] when: - resultFactory.get(new SimpleEntityData(parameter, FlexOptionsResult)) + Try input = resultFactory.get(new SimpleEntityData(parameter, FlexOptionsResult)) then: - FactoryException ex = thrown() - ex.message == "The provided fields [inputModel, pmin, pref, time] with data \n" + + input.failure + input.exception.get().cause.message == "The provided fields [inputModel, pmin, pref, time] with data \n" + "{inputModel -> 91ec3bcf-1897-4d38-af67-0bf7c9fa73c7,\n" + "pmin -> -1,\n" + "pref -> 2,\n" + diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/result/NodeResultFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/result/NodeResultFactoryTest.groovy index 44b55bde6..9dbc4e294 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/result/NodeResultFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/result/NodeResultFactoryTest.groovy @@ -9,6 +9,7 @@ import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.result.NodeResult +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -34,12 +35,12 @@ class NodeResultFactoryTest extends Specification implements FactoryTestHelper { ] when: - Optional result = resultFactory.get(new SimpleEntityData(parameter, NodeResult)) + Try result = resultFactory.get(new SimpleEntityData(parameter, NodeResult)) then: - result.present - result.get().getClass() == NodeResult - 
((NodeResult) result.get()).with { + result.success + result.data.get().getClass() == NodeResult + ((NodeResult) result.data.get()).with { assert vMag == getQuant(parameter["vmag"], StandardUnits.VOLTAGE_MAGNITUDE) assert vAng == getQuant(parameter["vang"], StandardUnits.VOLTAGE_ANGLE) assert time == TIME_UTIL.toZonedDateTime(parameter["time"]) @@ -57,11 +58,11 @@ class NodeResultFactoryTest extends Specification implements FactoryTestHelper { ] when: - resultFactory.get(new SimpleEntityData(parameter, NodeResult)) + Try input = resultFactory.get(new SimpleEntityData(parameter, NodeResult)) then: - FactoryException ex = thrown() - ex.message == "The provided fields [inputModel, time, vmag] with data \n" + + input.failure + input.exception.get().cause.message == "The provided fields [inputModel, time, vmag] with data \n" + "{inputModel -> 91ec3bcf-1897-4d38-af67-0bf7c9fa73c7,\n" + "time -> 2020-01-30 17:26:44,\n" + "vmag -> 2} are invalid for instance of NodeResult. \n" + diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/result/SwitchResultFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/result/SwitchResultFactoryTest.groovy index ec5bd35a4..ff05a4ff8 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/result/SwitchResultFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/result/SwitchResultFactoryTest.groovy @@ -5,8 +5,10 @@ */ package edu.ie3.datamodel.io.factory.result +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.result.connector.SwitchResult +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -33,12 +35,12 @@ class SwitchResultFactoryTest extends Specification implements FactoryTestHelper ] when: - Optional result = resultFactory.get(new SimpleEntityData(parameter, SwitchResult)) + Try result = resultFactory.get(new SimpleEntityData(parameter, SwitchResult)) 
then: - result.present - result.get().getClass() == SwitchResult - ((SwitchResult) result.get()).with { + result.success + result.data.get().getClass() == SwitchResult + ((SwitchResult) result.data.get()).with { assert time == TIME_UTIL.toZonedDateTime(parameter["time"]) assert inputModel == UUID.fromString(parameter["inputModel"]) assert closed == Boolean.parseBoolean(parameter["closed"]) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/result/SystemParticipantResultFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/result/SystemParticipantResultFactoryTest.groovy index 0be343451..5ca875cda 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/result/SystemParticipantResultFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/result/SystemParticipantResultFactoryTest.groovy @@ -9,6 +9,7 @@ import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.result.system.* +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.unit.Units @@ -55,12 +56,12 @@ class SystemParticipantResultFactoryTest extends Specification implements Factor } when: - Optional result = resultFactory.get(new SimpleEntityData(parameter, modelClass)) + Try result = resultFactory.get(new SimpleEntityData(parameter, modelClass)) then: - result.present - result.get().getClass() == resultingModelClass - ((SystemParticipantResult) result.get()).with { + result.success + result.data.get().getClass() == resultingModelClass + ((SystemParticipantResult) result.data.get()).with { assert p == getQuant(parameter["p"], StandardUnits.ACTIVE_POWER_RESULT) assert q == getQuant(parameter["q"], StandardUnits.REACTIVE_POWER_RESULT) assert time == TIME_UTIL.toZonedDateTime(parameter["time"]) @@ -68,19 +69,19 @@ class SystemParticipantResultFactoryTest extends 
Specification implements Factor } if (modelClass == EvResult) { - assert (((EvResult) result.get()).soc == getQuant(parameter["soc"], Units.PERCENT)) + assert (((EvResult) result.data.get()).soc == getQuant(parameter["soc"], Units.PERCENT)) } if (modelClass == StorageResult) { - assert (((StorageResult) result.get()).soc == getQuant(parameter["soc"], Units.PERCENT)) + assert (((StorageResult) result.data.get()).soc == getQuant(parameter["soc"], Units.PERCENT)) } if (modelClass == HpResult) { - assert(((HpResult)result.get()).getqDot() == getQuant(parameter["qDot"], StandardUnits.Q_DOT_RESULT)) + assert(((HpResult) result.data.get()).getqDot() == getQuant(parameter["qDot"], StandardUnits.Q_DOT_RESULT)) } if (modelClass == ChpResult) { - assert(((ChpResult)result.get()).getqDot() == getQuant(parameter["qDot"], StandardUnits.Q_DOT_RESULT)) + assert(((ChpResult) result.data.get()).getqDot() == getQuant(parameter["qDot"], StandardUnits.Q_DOT_RESULT)) } where: @@ -109,12 +110,12 @@ class SystemParticipantResultFactoryTest extends Specification implements Factor "q" : "2" ] when: - Optional result = resultFactory.get(new SimpleEntityData(parameter, StorageResult)) + Try result = resultFactory.get(new SimpleEntityData(parameter, StorageResult)) then: - result.present - result.get().getClass() == StorageResult - ((StorageResult) result.get()).with { + result.success + result.data.get().getClass() == StorageResult + ((StorageResult) result.data.get()).with { assert p == getQuant(parameter["p"], StandardUnits.ACTIVE_POWER_RESULT) assert q == getQuant(parameter["q"], StandardUnits.REACTIVE_POWER_RESULT) assert soc == getQuant(parameter["soc"], Units.PERCENT) @@ -132,11 +133,11 @@ class SystemParticipantResultFactoryTest extends Specification implements Factor "q" : "2" ] when: - resultFactory.get(new SimpleEntityData(parameter, WecResult)) + Try result = resultFactory.get(new SimpleEntityData(parameter, WecResult)) then: - FactoryException ex = thrown() - ex.message == "The 
provided fields [inputModel, q, time] with data \n" + + result.failure + result.exception.get().cause.message == "The provided fields [inputModel, q, time] with data \n" + "{inputModel -> 91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,\n" + "q -> 2,\n" + "time -> 2020-01-30 17:26:44} are invalid for instance of WecResult. \n" + diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/result/ThermalResultFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/result/ThermalResultFactoryTest.groovy index 4e0fb66ca..3e9456fe2 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/result/ThermalResultFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/result/ThermalResultFactoryTest.groovy @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.result +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.result.thermal.CylindricalStorageResult import edu.ie3.datamodel.models.result.thermal.ThermalHouseResult import edu.ie3.datamodel.models.result.thermal.ThermalUnitResult +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -39,12 +41,12 @@ class ThermalResultFactoryTest extends Specification implements FactoryTestHelpe "fillLevel" : "20" ] when: - Optional result = resultFactory.get(new SimpleEntityData(parameter, CylindricalStorageResult)) + Try result = resultFactory.get(new SimpleEntityData(parameter, CylindricalStorageResult)) then: - result.present - result.get().getClass() == CylindricalStorageResult - ((CylindricalStorageResult) result.get()).with { + result.success + result.data.get().getClass() == CylindricalStorageResult + ((CylindricalStorageResult) result.data.get()).with { assert time == TIME_UTIL.toZonedDateTime(parameter.get("time")) assert inputModel == 
UUID.fromString(parameter.get("inputModel")) assert qDot == Quantities.getQuantity(Double.parseDouble(parameter.get("qDot")), StandardUnits.HEAT_DEMAND) @@ -63,12 +65,12 @@ class ThermalResultFactoryTest extends Specification implements FactoryTestHelpe "indoorTemperature": "21" ] when: - Optional result = resultFactory.get(new SimpleEntityData(parameter, ThermalHouseResult)) + Try result = resultFactory.get(new SimpleEntityData(parameter, ThermalHouseResult)) then: - result.present - result.get().getClass() == ThermalHouseResult - ((ThermalHouseResult) result.get()).with { + result.success + result.data.get().getClass() == ThermalHouseResult + ((ThermalHouseResult) result.data.get()).with { assert time == TIME_UTIL.toZonedDateTime(parameter.get("time")) assert inputModel == UUID.fromString(parameter.get("inputModel")) assert qDot == Quantities.getQuantity(Double.parseDouble(parameter.get("qDot")), StandardUnits.HEAT_DEMAND) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/CosmoIdCoordinateFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/CosmoIdCoordinateFactoryTest.groovy index 8ca8bbd6a..117999ce4 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/CosmoIdCoordinateFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/CosmoIdCoordinateFactoryTest.groovy @@ -5,7 +5,6 @@ */ package edu.ie3.datamodel.io.factory.timeseries -import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleFactoryData import edu.ie3.util.geo.GeoUtils import org.apache.commons.lang3.tuple.Pair @@ -31,14 +30,18 @@ class CosmoIdCoordinateFactoryTest extends Specification { "latgeo", "longgeo" ] as Set - def validSimpleFactoryData = new SimpleFactoryData([ + + Map parameter = [ "tid": "1", "id": "106580", "latgeo": "39.602772", "longgeo": "1.279336", "latrot": "-10", "longrot": "-6.8125" - ] as Map, Pair) + ] + + + def validSimpleFactoryData = new 
SimpleFactoryData(parameter, Pair) when: @@ -51,40 +54,44 @@ class CosmoIdCoordinateFactoryTest extends Specification { def "A COSMO id to coordinate factory refuses to build from invalid data"() { given: - def invalidSimpleFactoryData = new SimpleFactoryData([ + Map parameter = [ "tid": "1", "id": "106580", "latrot": "-10", "longrot": "-6.8125" - ] as Map, Pair) + ] + + def invalidSimpleFactoryData = new SimpleFactoryData(parameter, Pair) when: - factory.get(invalidSimpleFactoryData) + def actual = factory.get(invalidSimpleFactoryData) then: - def e = thrown(FactoryException) - e.message.startsWith("The provided fields [id, latrot, longrot, tid] with data \n{id -> 106580,\nlatrot" + + actual.failure + actual.exception.get().cause.message.startsWith("The provided fields [id, latrot, longrot, tid] with data \n{id -> 106580,\nlatrot" + " -> -10,\nlongrot -> -6.8125,\ntid -> 1} are invalid for instance of Pair.") } def "A COSMO id to coordinate factory builds model from valid data"() { given: - def validSimpleFactoryData = new SimpleFactoryData([ + Map parameter = [ "tid": "1", "id": "106580", "latgeo": "39.602772", "longgeo": "1.279336", "latrot": "-10", "longrot": "-6.8125" - ] as Map, Pair) + ] + + def validSimpleFactoryData = new SimpleFactoryData(parameter, Pair) Pair expectedPair = Pair.of(106580, GeoUtils.buildPoint(39.602772, 1.279336)) when: def actual = factory.get(validSimpleFactoryData) then: - actual.present - actual.get().with { + actual.success + actual.data.get().with { assert it.key == expectedPair.key assert it.value.equalsExact(expectedPair.value, 1E-6) } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/IconIdCoordinateFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/IconIdCoordinateFactoryTest.groovy index d4a004b79..b40b44387 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/IconIdCoordinateFactoryTest.groovy +++ 
b/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/IconIdCoordinateFactoryTest.groovy @@ -5,7 +5,6 @@ */ package edu.ie3.datamodel.io.factory.timeseries -import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleFactoryData import edu.ie3.util.geo.GeoUtils import org.apache.commons.lang3.tuple.Pair @@ -29,11 +28,13 @@ class IconIdCoordinateFactoryTest extends Specification { "longitude", "coordinatetype" ] as Set - def validSimpleFactoryData = new SimpleFactoryData([ + Map parameter = [ "id":"477295", "latitude":"52.312", "longitude":"12.812", - "coordinatetype":"ICON"] as Map, Pair) + "coordinatetype":"ICON"] + + def validSimpleFactoryData = new SimpleFactoryData(parameter, Pair) when: def actual = factory.getFields(validSimpleFactoryData) @@ -45,35 +46,38 @@ class IconIdCoordinateFactoryTest extends Specification { def "A COSMO id to coordinate factory refuses to build from invalid data"() { given: - def invalidSimpleFactoryData = new SimpleFactoryData([ + Map parameter = [ "id":"477295", "latitude":"52.312", - "coordinatetype":"ICON"] as Map, Pair) + "coordinatetype":"ICON"] + + def invalidSimpleFactoryData = new SimpleFactoryData(parameter, Pair) when: - factory.get(invalidSimpleFactoryData) + def actual = factory.get(invalidSimpleFactoryData) then: - def e = thrown(FactoryException) - e.message.startsWith("The provided fields [coordinatetype, id, latitude] with data \n{coordinatetype -> " + + actual.failure + actual.exception.get().cause.message.startsWith("The provided fields [coordinatetype, id, latitude] with data \n{coordinatetype -> " + "ICON,\nid -> 477295,\nlatitude -> 52.312} are invalid for instance of Pair. 
") } def "A COSMO id to coordinate factory builds model from valid data"() { given: - def validSimpleFactoryData = new SimpleFactoryData([ + Map parameter = [ "id":"477295", "latitude":"52.312", "longitude":"12.812", - "coordinatetype":"ICON"] as Map, Pair) + "coordinatetype":"ICON"] + def validSimpleFactoryData = new SimpleFactoryData(parameter, Pair) Pair expectedPair = Pair.of(477295, GeoUtils.buildPoint(52.312, 12.812)) when: def actual = factory.get(validSimpleFactoryData) then: - actual.present - actual.get().with { + actual.success + actual.data.get().with { assert it.key == expectedPair.key assert it.value.equalsExact(expectedPair.value, 1E-6) } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/TimeBasedSimpleValueFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/TimeBasedSimpleValueFactoryTest.groovy index e05cd8989..a91436c4e 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/TimeBasedSimpleValueFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/timeseries/TimeBasedSimpleValueFactoryTest.groovy @@ -11,12 +11,7 @@ import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue -import edu.ie3.datamodel.models.value.EnergyPriceValue -import edu.ie3.datamodel.models.value.HeatAndPValue -import edu.ie3.datamodel.models.value.HeatAndSValue -import edu.ie3.datamodel.models.value.HeatDemandValue -import edu.ie3.datamodel.models.value.PValue -import edu.ie3.datamodel.models.value.SValue +import edu.ie3.datamodel.models.value.* import edu.ie3.util.TimeUtil import spock.lang.Shared import spock.lang.Specification diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/LineTypeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/LineTypeInputFactoryTest.groovy index a5304c04c..0248bf204 100644 
--- a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/LineTypeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/LineTypeInputFactoryTest.groovy @@ -5,6 +5,8 @@ */ package edu.ie3.datamodel.io.factory.typeinput +import edu.ie3.datamodel.exceptions.FactoryException +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits @@ -38,12 +40,12 @@ class LineTypeInputFactoryTest extends Specification implements FactoryTestHelpe def typeInputClass = LineTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass - ((LineTypeInput) typeInput.get()).with { + typeInput.success + typeInput.data.get().getClass() == typeInputClass + typeInput.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert b == getQuant(parameter["b"], StandardUnits.SUSCEPTANCE_PER_LENGTH) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactoryTest.groovy index 5c5cd1d8f..b5f40c2ac 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactoryTest.groovy @@ -10,6 +10,7 @@ import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicPoint import edu.ie3.datamodel.models.input.system.type.* +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import 
spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -55,13 +56,13 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac def typeInputClass = EvTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass + typeInput.success + typeInput.data.get().getClass() == typeInputClass - ((EvTypeInput) typeInput.get()).with { + ((EvTypeInput) typeInput.data.get()).with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) @@ -90,13 +91,13 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac def typeInputClass = HpTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass + typeInput.success + typeInput.data.get().getClass() == typeInputClass - ((HpTypeInput) typeInput.get()).with { + ((HpTypeInput) typeInput.data.get()).with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) @@ -124,13 +125,13 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac def typeInputClass = BmTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass + typeInput.success + typeInput.data.get().getClass() == typeInputClass - ((BmTypeInput) typeInput.get()).with { + ((BmTypeInput) 
typeInput.data.get()).with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) @@ -162,13 +163,13 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac def typeInputClass = WecTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass + typeInput.success + typeInput.data.get().getClass() == typeInputClass - ((WecTypeInput) typeInput.get()).with { + ((WecTypeInput) typeInput.data.get()).with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) @@ -209,13 +210,13 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac def typeInputClass = ChpTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass + typeInput.success + typeInput.data.get().getClass() == typeInputClass - ((ChpTypeInput) typeInput.get()).with { + ((ChpTypeInput) typeInput.data.get()).with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) @@ -252,13 +253,13 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac def typeInputClass = StorageTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass + 
typeInput.success + typeInput.data.get().getClass() == typeInputClass - ((StorageTypeInput) typeInput.get()).with { + ((StorageTypeInput) typeInput.data.get()).with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) @@ -295,11 +296,11 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac ] when: - typeInputFactory.get(new SimpleEntityData(parameter, StorageTypeInput)) + Try input = typeInputFactory.get(new SimpleEntityData(parameter, StorageTypeInput)) then: - FactoryException ex = thrown() - ex.message == "The provided fields [capex, cosPhiRated, dod, estorage, eta, id, lifetime, opex, pmax, pmin, srated, uuid] with data \n" + + input.failure + input.exception.get().cause.message == "The provided fields [capex, cosPhiRated, dod, estorage, eta, id, lifetime, opex, pmax, pmin, srated, uuid] with data \n" + "{capex -> 3,\n" + "cosPhiRated -> 6,\n" + "dod -> 10,\n" + diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/Transformer2WTypeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/Transformer2WTypeInputFactoryTest.groovy index 53d197b31..848de5662 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/Transformer2WTypeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/Transformer2WTypeInputFactoryTest.groovy @@ -5,9 +5,11 @@ */ package edu.ie3.datamodel.io.factory.typeinput +import edu.ie3.datamodel.exceptions.FactoryException import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import spock.lang.Specification @@ -45,13 +47,13 @@ class Transformer2WTypeInputFactoryTest extends Specification implements Factory def typeInputClass = 
Transformer2WTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass + typeInput.success + typeInput.data.get().getClass() == typeInputClass - typeInput.get().with { + typeInput.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert rSc == getQuant(parameter["rsc"], StandardUnits.RESISTANCE) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/Transformer3WTypeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/Transformer3WTypeInputFactoryTest.groovy index 0c204f2cb..d01862624 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/Transformer3WTypeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/Transformer3WTypeInputFactoryTest.groovy @@ -5,6 +5,8 @@ */ package edu.ie3.datamodel.io.factory.typeinput +import edu.ie3.datamodel.exceptions.FactoryException +import edu.ie3.datamodel.utils.Try import edu.ie3.test.helper.FactoryTestHelper import edu.ie3.datamodel.io.factory.SimpleEntityData import edu.ie3.datamodel.models.StandardUnits @@ -51,13 +53,13 @@ class Transformer3WTypeInputFactoryTest extends Specification implements Factory def typeInputClass = Transformer3WTypeInput when: - Optional typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) + Try typeInput = typeInputFactory.get(new SimpleEntityData(parameter, typeInputClass)) then: - typeInput.present - typeInput.get().getClass() == typeInputClass + typeInput.success + typeInput.data.get().getClass() == typeInputClass - typeInput.get().with { + typeInput.data.get().with { assert uuid == UUID.fromString(parameter["uuid"]) assert id == parameter["id"] assert sRatedA == getQuant(parameter["srateda"], StandardUnits.S_RATED) diff --git 
a/src/test/groovy/edu/ie3/datamodel/io/naming/DefaultDirectoryHierarchyTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/naming/DefaultDirectoryHierarchyTest.groovy index af30a17b3..8852605c4 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/naming/DefaultDirectoryHierarchyTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/naming/DefaultDirectoryHierarchyTest.groovy @@ -28,7 +28,7 @@ class DefaultDirectoryHierarchyTest extends Specification { } def basePathString(String gridName) { - FilenameUtils.concat(tmpDirectory.toString(), gridName) + tmpDirectory.resolve(gridName) } def cleanup() { @@ -41,21 +41,21 @@ class DefaultDirectoryHierarchyTest extends Specification { def basePath = basePathString(gridName) when: - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) then: try { - dfh.baseDirectory == Paths.get(basePath) + dfh.baseDirectory.get() == basePath dfh.subDirectories.size() == 9 - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "input", "grid").collect(Collectors.joining(File.separator)))) == true - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "input", "participants").collect(Collectors.joining(File.separator)))) == true - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "input", "participants", "time_series").collect(Collectors.joining(File.separator)))) == false - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "input", "global").collect(Collectors.joining(File.separator)))) == true - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "input", "thermal").collect(Collectors.joining(File.separator)))) == false - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "input", "graphics").collect(Collectors.joining(File.separator)))) == false - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "results", "grid").collect(Collectors.joining(File.separator)))) == false - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "results", 
"participants").collect(Collectors.joining(File.separator)))) == false - dfh.subDirectories.get(Paths.get(Stream.of(basePath, "results", "thermal").collect(Collectors.joining(File.separator)))) == false + dfh.subDirectories.get(basePath.resolve(Path.of("input", "grid"))) == true + dfh.subDirectories.get(basePath.resolve(Path.of("input", "participants"))) == true + dfh.subDirectories.get(basePath.resolve(Path.of("input", "participants", "time_series"))) == false + dfh.subDirectories.get(basePath.resolve(Path.of("input", "global"))) == true + dfh.subDirectories.get(basePath.resolve(Path.of("input", "thermal"))) == false + dfh.subDirectories.get(basePath.resolve(Path.of("input", "graphics"))) == false + dfh.subDirectories.get(basePath.resolve(Path.of("results", "grid"))) == false + dfh.subDirectories.get(basePath.resolve(Path.of("results", "participants"))) == false + dfh.subDirectories.get(basePath.resolve(Path.of("results", "thermal"))) == false } catch (TestFailedException e) { FileIOUtils.deleteRecursively(tmpDirectory) throw e @@ -65,8 +65,8 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy is able to create a correct hierarchy of mandatory directories"() { given: def gridName = "test_grid" - def basePath = Paths.get(basePathString(gridName)) - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def basePath = basePathString(gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) when: dfh.createDirs() @@ -91,8 +91,8 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy is able to create a correct hierarchy of mandatory and optional directories"() { given: def gridName = "test_grid" - def basePath = Paths.get(basePathString(gridName)) - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def basePath = basePathString(gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) when: dfh.createDirs(true) 
@@ -112,7 +112,7 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy is able to validate a correct hierarchy of mandatory and optional directories"() { given: def gridName = "test_grid" - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) dfh.createDirs(true) when: @@ -125,8 +125,8 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy throws an exception when trying to validate a missing hierarchy of mandatory and optional directories"() { given: def gridName = "test_grid" - def basePath = Paths.get(basePathString(gridName)) - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def basePath = basePathString(gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) when: dfh.validate() @@ -139,8 +139,8 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy throws an exception when trying to validate a file instead of a hierarchy"() { given: def gridName = "test_grid" - def basePath = Paths.get(basePathString(gridName)) - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def basePath = basePathString(gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) Files.createFile(basePath) when: @@ -154,8 +154,8 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy throws an exception when trying to validate a hierarchy with missing mandatory directory"() { given: def gridName = "test_grid" - def basePath = Paths.get(basePathString(gridName)) - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def basePath = basePathString(gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) dfh.createDirs() def globalDirectory = dfh.subDirectories.entrySet().find { entry -> entry.key.toString().endsWith("global") }.key 
Files.delete(globalDirectory) @@ -171,8 +171,8 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy throws an exception when trying to validate a hierarchy with file instead of mandatory directory"() { given: def gridName = "test_grid" - def basePath = Paths.get(basePathString(gridName)) - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def basePath = basePathString(gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) dfh.createDirs() def globalDirectory = dfh.subDirectories.entrySet().find { entry -> entry.key.toString().endsWith("global") }.key Files.delete(globalDirectory) @@ -189,8 +189,8 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy throws an exception when trying to validate a hierarchy with file instead of optional directory"() { given: def gridName = "test_grid" - def basePath = Paths.get(basePathString(gridName)) - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def basePath = basePathString(gridName) + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) dfh.createDirs(true) def thermalDirectory = dfh.subDirectories.entrySet().find { entry -> entry.key.toString().endsWith("input" + File.separator + "thermal") }.key Files.delete(thermalDirectory) @@ -207,9 +207,9 @@ class DefaultDirectoryHierarchyTest extends Specification { def "A DefaultFileHierarchy throws an exception when trying to validate a hierarchy with unsupported extra directory"() { given: def gridName = "test_grid" - def basePath = Paths.get(basePathString(gridName)) - def fifthWheelPath = Paths.get(FilenameUtils.concat(basePathString(gridName), "something_on_top")) - def dfh = new DefaultDirectoryHierarchy(tmpDirectory.toString(), gridName) + def basePath = basePathString(gridName) + def fifthWheelPath = basePathString(gridName).resolve("something_on_top") + def dfh = new DefaultDirectoryHierarchy(tmpDirectory, gridName) 
dfh.createDirs(true) Files.createDirectory(fifthWheelPath) diff --git a/src/test/groovy/edu/ie3/datamodel/io/naming/FileNamingStrategyTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/naming/FileNamingStrategyTest.groovy index d6b17d4d4..e36fe9c9d 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/naming/FileNamingStrategyTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/naming/FileNamingStrategyTest.groovy @@ -69,7 +69,7 @@ import spock.lang.Specification import tech.units.indriya.quantity.Quantities import java.nio.file.Files -import java.nio.file.Paths +import java.nio.file.Path import java.time.ZonedDateTime class FileNamingStrategyTest extends Specification { @@ -81,7 +81,7 @@ class FileNamingStrategyTest extends Specification { def setup() { def tmpPath = Files.createTempDirectory("psdm_file_naming_strategy") - defaultHierarchy = new DefaultDirectoryHierarchy(tmpPath.toString(), "test_grid") + defaultHierarchy = new DefaultDirectoryHierarchy(tmpPath, "test_grid") flatHierarchy = new FlatDirectoryHierarchy() simpleEntityNaming = new EntityPersistenceNamingStrategy() } @@ -98,28 +98,28 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - LoadResult || "test_grid" + File.separator + "results" + File.separator + "participants" - FixedFeedInResult || "test_grid" + File.separator + "results" + File.separator + "participants" - BmResult || "test_grid" + File.separator + "results" + File.separator + "participants" - PvResult || "test_grid" + File.separator + "results" + File.separator + "participants" - ChpResult || "test_grid" + File.separator + "results" + File.separator + "participants" - WecResult || "test_grid" + File.separator + "results" + File.separator + "participants" - StorageResult || "test_grid" + File.separator + "results" + File.separator + "participants" - EvcsResult || "test_grid" + File.separator + "results" + File.separator + 
"participants" - EvResult || "test_grid" + File.separator + "results" + File.separator + "participants" - EmResult || "test_grid" + File.separator + "results" + File.separator + "participants" - FlexOptionsResult || "test_grid" + File.separator + "results" + File.separator + "participants" - Transformer2WResult || "test_grid" + File.separator + "results" + File.separator + "grid" - Transformer3WResult || "test_grid" + File.separator + "results" + File.separator + "grid" - LineResult || "test_grid" + File.separator + "results" + File.separator + "grid" - SwitchResult || "test_grid" + File.separator + "results" + File.separator + "grid" - NodeResult || "test_grid" + File.separator + "results" + File.separator + "grid" - CylindricalStorageResult || "test_grid" + File.separator + "results" + File.separator + "thermal" - ThermalHouseResult || "test_grid" + File.separator + "results" + File.separator + "thermal" + modelClass || expectedPath + LoadResult || Path.of("test_grid", "results", "participants") + FixedFeedInResult || Path.of("test_grid", "results", "participants") + BmResult || Path.of("test_grid", "results", "participants") + PvResult || Path.of("test_grid", "results", "participants") + ChpResult || Path.of("test_grid", "results", "participants") + WecResult || Path.of("test_grid", "results", "participants") + StorageResult || Path.of("test_grid", "results", "participants") + EvcsResult || Path.of("test_grid", "results", "participants") + EvResult || Path.of("test_grid", "results", "participants") + EmResult || Path.of("test_grid", "results", "participants") + FlexOptionsResult || Path.of("test_grid", "results", "participants") + Transformer2WResult || Path.of("test_grid", "results", "grid") + Transformer3WResult || Path.of("test_grid", "results", "grid") + LineResult || Path.of("test_grid", "results", "grid") + SwitchResult || Path.of("test_grid", "results", "grid") + NodeResult || Path.of("test_grid", "results", "grid") + CylindricalStorageResult || 
Path.of("test_grid", "results", "thermal") + ThermalHouseResult || Path.of("test_grid", "results", "thermal") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid directory paths for all input assets models"() { @@ -131,28 +131,28 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - FixedFeedInInput || "test_grid" + File.separator + "input" + File.separator + "participants" - PvInput || "test_grid" + File.separator + "input" + File.separator + "participants" - WecInput || "test_grid" + File.separator + "input" + File.separator + "participants" - ChpInput || "test_grid" + File.separator + "input" + File.separator + "participants" - BmInput || "test_grid" + File.separator + "input" + File.separator + "participants" - EvInput || "test_grid" + File.separator + "input" + File.separator + "participants" - EvcsInput || "test_grid" + File.separator + "input" + File.separator + "participants" - LoadInput || "test_grid" + File.separator + "input" + File.separator + "participants" - StorageInput || "test_grid" + File.separator + "input" + File.separator + "participants" - HpInput || "test_grid" + File.separator + "input" + File.separator + "participants" - LineInput || "test_grid" + File.separator + "input" + File.separator + "grid" - SwitchInput || "test_grid" + File.separator + "input" + File.separator + "grid" - NodeInput || "test_grid" + File.separator + "input" + File.separator + "grid" - MeasurementUnitInput || "test_grid" + File.separator + "input" + File.separator + "grid" - Transformer2WInput || "test_grid" + File.separator + "input" + File.separator + "grid" - Transformer3WInput || "test_grid" + File.separator + "input" + File.separator + "grid" - CylindricalStorageInput || "test_grid" + File.separator + "input" + File.separator + "thermal" - ThermalHouseInput || "test_grid" + File.separator + "input" + 
File.separator + "thermal" + modelClass || expectedPath + FixedFeedInInput || Path.of("test_grid", "input", "participants") + PvInput || Path.of("test_grid", "input", "participants") + WecInput || Path.of("test_grid", "input", "participants") + ChpInput || Path.of("test_grid", "input", "participants") + BmInput || Path.of("test_grid", "input", "participants") + EvInput || Path.of("test_grid", "input", "participants") + EvcsInput || Path.of("test_grid", "input", "participants") + LoadInput || Path.of("test_grid", "input", "participants") + StorageInput || Path.of("test_grid", "input", "participants") + HpInput || Path.of("test_grid", "input", "participants") + LineInput || Path.of("test_grid", "input", "grid") + SwitchInput || Path.of("test_grid", "input", "grid") + NodeInput || Path.of("test_grid", "input", "grid") + MeasurementUnitInput || Path.of("test_grid", "input", "grid") + Transformer2WInput || Path.of("test_grid", "input", "grid") + Transformer3WInput || Path.of("test_grid", "input", "grid") + CylindricalStorageInput || Path.of("test_grid", "input", "thermal") + ThermalHouseInput || Path.of("test_grid", "input", "thermal") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid directory paths for all input types models"() { @@ -164,19 +164,19 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - BmTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" - ChpTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" - EvTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" - HpTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" - StorageTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" - WecTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" - 
LineTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" - Transformer2WTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" - Transformer3WTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + modelClass || expectedPath + BmTypeInput || Path.of("test_grid", "input", "global") + ChpTypeInput || Path.of("test_grid", "input", "global") + EvTypeInput || Path.of("test_grid", "input", "global") + HpTypeInput || Path.of("test_grid", "input", "global") + StorageTypeInput || Path.of("test_grid", "input", "global") + WecTypeInput || Path.of("test_grid", "input", "global") + LineTypeInput || Path.of("test_grid", "input", "global") + Transformer2WTypeInput || Path.of("test_grid", "input", "global") + Transformer3WTypeInput || Path.of("test_grid", "input", "global") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid directory paths for a graphic input Model"() { @@ -188,12 +188,12 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - NodeGraphicInput || "test_grid" + File.separator + "input" + File.separator + "graphics" - LineGraphicInput || "test_grid" + File.separator + "input" + File.separator + "graphics" + modelClass || expectedPath + NodeGraphicInput || Path.of("test_grid", "input", "graphics") + LineGraphicInput || Path.of("test_grid", "input", "graphics") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffix should return valid directory path for load profile time series"() { @@ -210,7 +210,7 @@ class FileNamingStrategyTest extends Specification { where: clazz || expected - LoadProfileInput || "test_grid" + File.separator + "input" + File.separator + "global" + LoadProfileInput || Path.of("test_grid", "input", "global") } def "A FileNamingStrategy with DefaultHierarchy and should return valid directory 
path for individual time series"() { @@ -227,7 +227,7 @@ class FileNamingStrategyTest extends Specification { where: clazz || expected - IndividualTimeSeries || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "time_series" + IndividualTimeSeries || Path.of("test_grid", "input", "participants", "time_series") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid file paths for all result models"() { @@ -239,28 +239,28 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - LoadResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "load_res" - FixedFeedInResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "fixed_feed_in_res" - BmResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "bm_res" - PvResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "pv_res" - ChpResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "chp_res" - WecResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "wec_res" - StorageResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "storage_res" - EvcsResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "evcs_res" - EvResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "ev_res" - EmResult || "test_grid" + File.separator + "results" + File.separator + "participants" + File.separator + "em_res" - FlexOptionsResult || "test_grid" + File.separator + "results" + File.separator + "participants" + 
File.separator + "flex_options_res" - Transformer2WResult || "test_grid" + File.separator + "results" + File.separator + "grid" + File.separator + "transformer_2_w_res" - Transformer3WResult || "test_grid" + File.separator + "results" + File.separator + "grid" + File.separator + "transformer_3_w_res" - LineResult || "test_grid" + File.separator + "results" + File.separator + "grid" + File.separator + "line_res" - SwitchResult || "test_grid" + File.separator + "results" + File.separator + "grid" + File.separator + "switch_res" - NodeResult || "test_grid" + File.separator + "results" + File.separator + "grid" + File.separator + "node_res" - CylindricalStorageResult || "test_grid" + File.separator + "results" + File.separator + "thermal" + File.separator + "cylindrical_storage_res" - ThermalHouseResult || "test_grid" + File.separator + "results" + File.separator + "thermal" + File.separator + "thermal_house_res" + modelClass || expectedPath + LoadResult || Path.of("test_grid", "results", "participants", "load_res") + FixedFeedInResult || Path.of("test_grid", "results", "participants", "fixed_feed_in_res") + BmResult || Path.of("test_grid", "results", "participants", "bm_res") + PvResult || Path.of("test_grid", "results", "participants", "pv_res") + ChpResult || Path.of("test_grid", "results", "participants", "chp_res") + WecResult || Path.of("test_grid", "results", "participants", "wec_res") + StorageResult || Path.of("test_grid", "results", "participants", "storage_res") + EvcsResult || Path.of("test_grid", "results", "participants", "evcs_res") + EvResult || Path.of("test_grid", "results", "participants", "ev_res") + EmResult || Path.of("test_grid", "results", "participants", "em_res") + FlexOptionsResult || Path.of("test_grid", "results", "participants", "flex_options_res") + Transformer2WResult || Path.of("test_grid", "results", "grid", "transformer_2_w_res") + Transformer3WResult || Path.of("test_grid", "results", "grid", "transformer_3_w_res") + LineResult || 
Path.of("test_grid", "results", "grid", "line_res") + SwitchResult || Path.of("test_grid", "results", "grid", "switch_res") + NodeResult || Path.of("test_grid", "results", "grid", "node_res") + CylindricalStorageResult || Path.of("test_grid", "results", "thermal", "cylindrical_storage_res") + ThermalHouseResult || Path.of("test_grid", "results", "thermal", "thermal_house_res") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid file paths for all other input assets models"() { @@ -272,18 +272,18 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - LineInput || "test_grid" + File.separator + "input" + File.separator + "grid" + File.separator + "line_input" - SwitchInput || "test_grid" + File.separator + "input" + File.separator + "grid" + File.separator + "switch_input" - NodeInput || "test_grid" + File.separator + "input" + File.separator + "grid" + File.separator + "node_input" - MeasurementUnitInput || "test_grid" + File.separator + "input" + File.separator + "grid" + File.separator + "measurement_unit_input" - Transformer2WInput || "test_grid" + File.separator + "input" + File.separator + "grid" + File.separator + "transformer_2_w_input" - Transformer3WInput || "test_grid" + File.separator + "input" + File.separator + "grid" + File.separator + "transformer_3_w_input" - CylindricalStorageInput || "test_grid" + File.separator + "input" + File.separator + "thermal" + File.separator + "cylindrical_storage_input" - ThermalHouseInput || "test_grid" + File.separator + "input" + File.separator + "thermal" + File.separator + "thermal_house_input" + modelClass || expectedPath + LineInput || Path.of("test_grid", "input", "grid", "line_input") + SwitchInput || Path.of("test_grid", "input", "grid", "switch_input") + NodeInput || Path.of("test_grid", "input", "grid", "node_input") + MeasurementUnitInput || 
Path.of("test_grid", "input", "grid", "measurement_unit_input") + Transformer2WInput || Path.of("test_grid", "input", "grid", "transformer_2_w_input") + Transformer3WInput || Path.of("test_grid", "input", "grid", "transformer_3_w_input") + CylindricalStorageInput || Path.of("test_grid", "input", "thermal", "cylindrical_storage_input") + ThermalHouseInput || Path.of("test_grid", "input", "thermal", "thermal_house_input") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid file paths for all system input assets models"() { @@ -295,20 +295,20 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - FixedFeedInInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "fixed_feed_in_input" - PvInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "pv_input" - WecInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "wec_input" - ChpInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "chp_input" - BmInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "bm_input" - EvInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "ev_input" - LoadInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "load_input" - StorageInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "storage_input" - HpInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "hp_input" - EvcsInput || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "evcs_input" + modelClass || expectedPath + 
FixedFeedInInput || Path.of("test_grid", "input", "participants", "fixed_feed_in_input") + PvInput || Path.of("test_grid", "input", "participants", "pv_input") + WecInput || Path.of("test_grid", "input", "participants", "wec_input") + ChpInput || Path.of("test_grid", "input", "participants", "chp_input") + BmInput || Path.of("test_grid", "input", "participants", "bm_input") + EvInput || Path.of("test_grid", "input", "participants", "ev_input") + LoadInput || Path.of("test_grid", "input", "participants", "load_input") + StorageInput || Path.of("test_grid", "input", "participants", "storage_input") + HpInput || Path.of("test_grid", "input", "participants", "hp_input") + EvcsInput || Path.of("test_grid", "input", "participants", "evcs_input") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid file paths for all input types models"() { @@ -320,19 +320,19 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - BmTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "bm_type_input" - ChpTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "chp_type_input" - EvTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "ev_type_input" - HpTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "hp_type_input" - LineTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "line_type_input" - StorageTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "storage_type_input" - Transformer2WTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "transformer_2_w_type_input" - Transformer3WTypeInput || "test_grid" + 
File.separator + "input" + File.separator + "global" + File.separator + "transformer_3_w_type_input" - WecTypeInput || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "wec_type_input" + modelClass || expectedPath + BmTypeInput || Path.of("test_grid", "input", "global", "bm_type_input") + ChpTypeInput || Path.of("test_grid", "input", "global", "chp_type_input") + EvTypeInput || Path.of("test_grid", "input", "global", "ev_type_input") + HpTypeInput || Path.of("test_grid", "input", "global", "hp_type_input") + LineTypeInput || Path.of("test_grid", "input", "global", "line_type_input") + StorageTypeInput || Path.of("test_grid", "input", "global", "storage_type_input") + Transformer2WTypeInput || Path.of("test_grid", "input", "global", "transformer_2_w_type_input") + Transformer3WTypeInput || Path.of("test_grid", "input", "global", "transformer_3_w_type_input") + WecTypeInput || Path.of("test_grid", "input", "global", "wec_type_input") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid directory path for a Load Parameter Model"() { @@ -344,11 +344,11 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - RandomLoadParameters || "test_grid" + File.separator + "input" + File.separator + "global" + modelClass || expectedPath + RandomLoadParameters || Path.of("test_grid", "input", "global") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid file path for a Load Parameter Model"() { @@ -360,11 +360,11 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - RandomLoadParameters || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "random_load_parameters_input" + modelClass || 
expectedPath + RandomLoadParameters || Path.of("test_grid", "input", "global", "random_load_parameters_input") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid file paths for a graphic input Model"() { @@ -376,12 +376,12 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - NodeGraphicInput || "test_grid" + File.separator + "input" + File.separator + "graphics" + File.separator + "node_graphic_input" - LineGraphicInput || "test_grid" + File.separator + "input" + File.separator + "graphics" + File.separator + "line_graphic_input" + modelClass || expectedPath + NodeGraphicInput || Path.of("test_grid", "input", "graphics", "node_graphic_input") + LineGraphicInput || Path.of("test_grid", "input", "graphics", "line_graphic_input") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffix should return valid file path for individual time series"() { @@ -403,7 +403,7 @@ class FileNamingStrategyTest extends Specification { where: clazz | uuid || expectedFilePath - IndividualTimeSeries | UUID.fromString("4881fda2-bcee-4f4f-a5bb-6a09bf785276") || "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "time_series" + File.separator + "its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276" + IndividualTimeSeries | UUID.fromString("4881fda2-bcee-4f4f-a5bb-6a09bf785276") || Path.of("test_grid", "input", "participants", "time_series", "its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276") } def "A FileNamingStrategy with DefaultHierarchy and with pre- or suffix should return valid file path for individual time series"() { @@ -425,7 +425,7 @@ class FileNamingStrategyTest extends Specification { where: clazz | uuid || expectedFileName - IndividualTimeSeries | UUID.fromString("4881fda2-bcee-4f4f-a5bb-6a09bf785276") || "test_grid" + File.separator + "input" + File.separator + 
"participants" + File.separator + "time_series" + File.separator + "aa_its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276_zz" + IndividualTimeSeries | UUID.fromString("4881fda2-bcee-4f4f-a5bb-6a09bf785276") || Path.of("test_grid", "input", "participants", "time_series", "aa_its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276_zz") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffix should return valid file path for load profile time series"() { @@ -444,7 +444,7 @@ class FileNamingStrategyTest extends Specification { where: clazz | uuid | type || expectedFileName - LoadProfileInput | UUID.fromString("bee0a8b6-4788-4f18-bf72-be52035f7304") | BdewStandardLoadProfile.G3 || "test_grid" + File.separator + "input" + File.separator + "global" + File.separator + "lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304" + LoadProfileInput | UUID.fromString("bee0a8b6-4788-4f18-bf72-be52035f7304") | BdewStandardLoadProfile.G3 || Path.of("test_grid", "input", "global", "lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid directory path for time series mapping"() { @@ -456,7 +456,7 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "time_series" + res.get() == Path.of("test_grid", "input", "participants", "time_series") } def "A FileNamingStrategy with DefaultHierarchy and without pre- or suffixes should return valid file path for time series mapping"() { @@ -468,7 +468,7 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "time_series" + File.separator + "time_series_mapping" + res.get() == Path.of("test_grid", "input", "participants", "time_series", "time_series_mapping") } def "A FileNamingStrategy with DefaultHierarchy and pre- and 
suffix should return valid file path for time series mapping"() { @@ -480,7 +480,7 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == "test_grid" + File.separator + "input" + File.separator + "participants" + File.separator + "time_series" + File.separator + "prefix_time_series_mapping_suffix" + res.get() == Path.of("test_grid", "input", "participants", "time_series", "prefix_time_series_mapping_suffix") } @@ -638,28 +638,28 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - LoadResult || "load_res" - FixedFeedInResult || "fixed_feed_in_res" - BmResult || "bm_res" - PvResult || "pv_res" - ChpResult || "chp_res" - WecResult || "wec_res" - StorageResult || "storage_res" - EvcsResult || "evcs_res" - EvResult || "ev_res" - EmResult || "em_res" - FlexOptionsResult || "flex_options_res" - Transformer2WResult || "transformer_2_w_res" - Transformer3WResult || "transformer_3_w_res" - LineResult || "line_res" - SwitchResult || "switch_res" - NodeResult || "node_res" - CylindricalStorageResult || "cylindrical_storage_res" - ThermalHouseResult || "thermal_house_res" + modelClass || expectedPath + LoadResult || Path.of("load_res") + FixedFeedInResult || Path.of("fixed_feed_in_res") + BmResult || Path.of("bm_res") + PvResult || Path.of("pv_res") + ChpResult || Path.of("chp_res") + WecResult || Path.of("wec_res") + StorageResult || Path.of("storage_res") + EvcsResult || Path.of("evcs_res") + EvResult || Path.of("ev_res") + EmResult || Path.of("em_res") + FlexOptionsResult || Path.of("flex_options_res") + Transformer2WResult || Path.of("transformer_2_w_res") + Transformer3WResult || Path.of("transformer_3_w_res") + LineResult || Path.of("line_res") + SwitchResult || Path.of("switch_res") + NodeResult || Path.of("node_res") + CylindricalStorageResult || Path.of("cylindrical_storage_res") + ThermalHouseResult || 
Path.of("thermal_house_res") } def "A FileNamingStrategy with FlatHierarchy and without pre- or suffixes should return valid file paths for all other system input classes"() { @@ -671,28 +671,28 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - FixedFeedInInput || "fixed_feed_in_input" - PvInput || "pv_input" - WecInput || "wec_input" - ChpInput || "chp_input" - BmInput || "bm_input" - EvInput || "ev_input" - EvcsInput || "evcs_input" - LoadInput || "load_input" - StorageInput || "storage_input" - HpInput || "hp_input" - LineInput || "line_input" - SwitchInput || "switch_input" - NodeInput || "node_input" - MeasurementUnitInput || "measurement_unit_input" - Transformer2WInput || "transformer_2_w_input" - Transformer3WInput || "transformer_3_w_input" - CylindricalStorageInput || "cylindrical_storage_input" - ThermalHouseInput || "thermal_house_input" + modelClass || expectedPath + FixedFeedInInput || Path.of("fixed_feed_in_input") + PvInput || Path.of("pv_input") + WecInput || Path.of("wec_input") + ChpInput || Path.of("chp_input") + BmInput || Path.of("bm_input") + EvInput || Path.of("ev_input") + EvcsInput || Path.of("evcs_input") + LoadInput || Path.of("load_input") + StorageInput || Path.of("storage_input") + HpInput || Path.of("hp_input") + LineInput || Path.of("line_input") + SwitchInput || Path.of("switch_input") + NodeInput || Path.of("node_input") + MeasurementUnitInput || Path.of("measurement_unit_input") + Transformer2WInput || Path.of("transformer_2_w_input") + Transformer3WInput || Path.of("transformer_3_w_input") + CylindricalStorageInput || Path.of("cylindrical_storage_input") + ThermalHouseInput || Path.of("thermal_house_input") } def "A FileNamingStrategy with FlatHierarchy and without pre- or suffixes should return valid file paths for all system characteristic and type input classes"() { @@ -704,19 +704,19 @@ class 
FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - BmTypeInput || "bm_type_input" - ChpTypeInput || "chp_type_input" - EvTypeInput || "ev_type_input" - HpTypeInput || "hp_type_input" - StorageTypeInput || "storage_type_input" - WecTypeInput || "wec_type_input" - LineTypeInput || "line_type_input" - Transformer2WTypeInput || "transformer_2_w_type_input" - Transformer3WTypeInput || "transformer_3_w_type_input" + modelClass || expectedPath + BmTypeInput || Path.of("bm_type_input") + ChpTypeInput || Path.of("chp_type_input") + EvTypeInput || Path.of("ev_type_input") + HpTypeInput || Path.of("hp_type_input") + StorageTypeInput || Path.of("storage_type_input") + WecTypeInput || Path.of("wec_type_input") + LineTypeInput || Path.of("line_type_input") + Transformer2WTypeInput || Path.of("transformer_2_w_type_input") + Transformer3WTypeInput || Path.of("transformer_3_w_type_input") } def "A FileNamingStrategy with FlatHierarchy and without pre- or suffixes should return valid file paths for all graphics input classes"() { @@ -728,12 +728,12 @@ class FileNamingStrategyTest extends Specification { then: res.present - res.get() == expectedString + res.get() == expectedPath where: - modelClass || expectedString - NodeGraphicInput || "node_graphic_input" - LineGraphicInput || "line_graphic_input" + modelClass || expectedPath + NodeGraphicInput || Path.of("node_graphic_input") + LineGraphicInput || Path.of("line_graphic_input") } def "A FileNamingStrategy with FlatHierarchy does return valid file path for load profile time series"() { @@ -752,7 +752,7 @@ class FileNamingStrategyTest extends Specification { where: clazz | uuid | type || expectedFilePath - LoadProfileInput | UUID.fromString("bee0a8b6-4788-4f18-bf72-be52035f7304") | BdewStandardLoadProfile.G3 || "lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304" + LoadProfileInput | 
UUID.fromString("bee0a8b6-4788-4f18-bf72-be52035f7304") | BdewStandardLoadProfile.G3 || Path.of("lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304") } def "A FileNamingStrategy with FlatHierarchy does return valid file path for individual time series"() { @@ -774,7 +774,7 @@ class FileNamingStrategyTest extends Specification { where: clazz | uuid || expectedFilePath - IndividualTimeSeries | UUID.fromString("4881fda2-bcee-4f4f-a5bb-6a09bf785276") || "its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276" + IndividualTimeSeries | UUID.fromString("4881fda2-bcee-4f4f-a5bb-6a09bf785276") || Path.of("its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276") } String escapedFileSeparator = File.separator == "\\" ? "\\\\" : File.separator @@ -826,7 +826,7 @@ class FileNamingStrategyTest extends Specification { def "Trying to extract time series meta information throws an Exception, if it is provided a malformed string"() { given: def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) - def path = Paths.get("/bla/foo") + def path = Path.of("/bla/foo") when: fns.timeSeriesMetaInformation(path) @@ -839,7 +839,7 @@ class FileNamingStrategyTest extends Specification { def "The FileNamingStrategy extracts correct meta information from a valid time series file name"() { given: def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) - def path = Paths.get(pathString) + def path = Path.of(pathString) when: def metaInformation = fns.timeSeriesMetaInformation(path) @@ -865,7 +865,7 @@ class FileNamingStrategyTest extends Specification { def "The FileNamingStrategy extracts correct meta information from a valid time series file name with pre- and suffix"() { given: def fns = new FileNamingStrategy(new EntityPersistenceNamingStrategy("prefix", "suffix"), flatHierarchy) - def path = Paths.get(pathString) + def path = Path.of(pathString) when: def metaInformation = fns.timeSeriesMetaInformation(path) @@ -929,7 +929,7 @@ class FileNamingStrategyTest extends Specification { def "The 
FileNamingStrategy throw an IllegalArgumentException, if the column scheme is malformed."() { given: def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) - def path = Paths.get("/bla/foo/its_whoops_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv") + def path = Path.of("/bla/foo/its_whoops_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv") when: fns.timeSeriesMetaInformation(path) @@ -942,7 +942,7 @@ class FileNamingStrategyTest extends Specification { def "The FileNamingStrategy extracts correct meta information from a valid load profile time series file name"() { given: def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) - def path = Paths.get("/bla/foo/lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304.csv") + def path = Path.of("/bla/foo/lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304.csv") when: def metaInformation = fns.timeSeriesMetaInformation(path) @@ -958,7 +958,7 @@ class FileNamingStrategyTest extends Specification { def "The FileNamingStrategy extracts correct meta information from a valid load profile time series file name with pre- and suffix"() { given: def fns = new FileNamingStrategy(new EntityPersistenceNamingStrategy("prefix", "suffix"), flatHierarchy) - def path = Paths.get("/bla/foo/prefix_lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304_suffix.csv") + def path = Path.of("/bla/foo/prefix_lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304_suffix.csv") when: def metaInformation = fns.timeSeriesMetaInformation(path) @@ -980,7 +980,7 @@ class FileNamingStrategyTest extends Specification { then: idFilePath.present - idFilePath.get() == "prefix_coordinates_suffix" + idFilePath.get() == Path.of("prefix_coordinates_suffix") } def "The FileNamingStrategy with DefaultHierarchy returns the Id Coordinate file path correctly"() { @@ -991,7 +991,6 @@ class FileNamingStrategyTest extends Specification { then: idFilePath.present - idFilePath.get() == defaultHierarchy.baseDirectory.get() + File.separator + "prefix_coordinates_suffix" + idFilePath.get() == 
defaultHierarchy.baseDirectory.get().resolve("prefix_coordinates_suffix") } - } \ No newline at end of file diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy index 32a6279b4..5fffb0fa1 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy @@ -45,6 +45,7 @@ import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileEntry import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput import edu.ie3.datamodel.models.value.* +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.TimeSeriesTestData import edu.ie3.util.TimeUtil import spock.lang.Specification @@ -224,19 +225,27 @@ class ProcessorProviderTest extends Specification implements TimeSeriesTestData PvResult pvResult = new PvResult(uuid, TimeUtil.withDefaults.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) and: - Optional processorResult = provider.handleEntity(pvResult) + Try, ProcessorProviderException> result = provider.handleEntity(pvResult) then: - processorResult.present - Map resultMap = processorResult.get() + result.success + Map resultMap = result.data.get() + resultMap.size() == 5 resultMap == expectedMap when: - Optional result = provider.handleEntity(new WecResult(uuid, TimeUtil.withDefaults.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q)) + Try, ProcessorProviderException> entityTry = provider.handleEntity(new WecResult(uuid, TimeUtil.withDefaults.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q)) then: - !result.present + entityTry.failure + ProcessorProviderException ex = entityTry.exception.get() + [ + "Cannot find a suitable processor for provided class with name 'WecResult'. 
This provider's processors can process: ", + "PvResult", + "EvResult" + ] + .every { str -> ex.message.contains(str) } } def "A ProcessorProvider returns an empty Optional, if none of the assigned processors is able to handle a time series"() { @@ -248,10 +257,12 @@ class ProcessorProviderTest extends Specification implements TimeSeriesTestData ProcessorProvider provider = new ProcessorProvider([], timeSeriesProcessorMap) when: - Optional>> actual = provider.handleTimeSeries(individualIntTimeSeries) + provider.handleTimeSeries(individualIntTimeSeries) then: - !actual.present + Exception ex = thrown() + ex.class == ProcessorProviderException + ex.message == "Cannot find processor for time series combination 'TimeSeriesProcessorKey{timeSeriesClass=class edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries, entryClass=class edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue, valueClass=class edu.ie3.datamodel.models.timeseries.IntValue}'. Either your provider is not properly initialized or there is no implementation to process this entity class!)" } def "A ProcessorProvider handles a time series correctly"() { @@ -263,10 +274,9 @@ class ProcessorProviderTest extends Specification implements TimeSeriesTestData ProcessorProvider provider = new ProcessorProvider([], timeSeriesProcessorMap) when: - Optional>> actual = provider.handleTimeSeries(individualEnergyPriceTimeSeries) + Set> actual = provider.handleTimeSeries(individualEnergyPriceTimeSeries) then: - actual.present - actual.get() == individualEnergyPriceTimeSeriesProcessed + actual == individualEnergyPriceTimeSeriesProcessed } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy index defe04b4d..65dc1e1a6 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy +++ 
b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy @@ -63,8 +63,7 @@ class InputEntityProcessorTest extends Specification { def processingResult = processor.handleEntity(validResult) then: "make sure that the result is as expected " - processingResult.present - processingResult.get() == expectedResults + processingResult == expectedResults } def "A InputEntityProcessor should serialize a provided ConnectorInput correctly"() { @@ -76,9 +75,7 @@ class InputEntityProcessorTest extends Specification { def processingResult = processor.handleEntity(validInput) then: "make sure that the result is as expected " - processingResult.present - - processingResult.get() == expectedResult + processingResult == expectedResult where: modelClass | modelInstance || expectedResult @@ -146,9 +143,7 @@ class InputEntityProcessorTest extends Specification { def processingResult = processor.handleEntity(validInput) then: "make sure that the result is as expected " - processingResult.present - - processingResult.get().forEach { k, v -> + processingResult.forEach { k, v -> if (k != "nodeInternal") // the internal 3w node is always randomly generated, hence we can skip to test on this assert (v == expectedResult.get(k)) } @@ -296,11 +291,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(validNode) + Map actual = processor.handleEntity(validNode) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided NodeGraphicInput with path correctly"() { @@ -316,11 +310,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(validNode) + Map actual = processor.handleEntity(validNode) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided LineGraphicInput correctly"() { @@ -335,11 +328,10 @@ class 
InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(validNode) + Map actual = processor.handleEntity(validNode) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided OperatorInput correctly"() { @@ -352,11 +344,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(operator) + Map actual = processor.handleEntity(operator) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided RandomLoadParameters correctly"() { @@ -390,11 +381,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(parameters) + Map actual = processor.handleEntity(parameters) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided WecTypeInput correctly"() { @@ -415,11 +405,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided Transformer2WTypeInput correctly"() { @@ -445,11 +434,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided Transformer3WTypeInput correctly"() { @@ -481,11 +469,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + actual == expected } def "The 
InputEntityProcessor should serialize a provided LineTypeInput correctly"() { @@ -504,11 +491,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided EvTypeInput correctly"() { @@ -527,11 +513,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided ChpTypeInput correctly"() { @@ -552,11 +537,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided HpTypeInput correctly"() { @@ -574,11 +558,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided BmTypeInput correctly"() { @@ -597,11 +580,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + actual == expected } def "The InputEntityProcessor should serialize a provided StorageTypeInput correctly"() { @@ -625,11 +607,10 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(type) + Map actual = processor.handleEntity(type) then: - actual.present - actual.get() == expected + 
actual == expected } def "The InputEntityProcessor should serialize an entity but ignore the operator field when OperatorInput is equal to NO_OPERATOR_ASSIGNED"() { @@ -660,10 +641,9 @@ class InputEntityProcessorTest extends Specification { ] when: - Optional> actual = processor.handleEntity(nodeWithOutOperator) + Map actual = processor.handleEntity(nodeWithOutOperator) then: - actual.present - actual.get() == expected + actual == expected } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/result/ResultEntityProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/result/ResultEntityProcessorTest.groovy index 9071a0bca..94f55d0ad 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/result/ResultEntityProcessorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/result/ResultEntityProcessorTest.groovy @@ -73,8 +73,7 @@ class ResultEntityProcessorTest extends Specification { def validProcessedElement = sysPartResProcessor.handleEntity(validResult) then: - validProcessedElement.present - validProcessedElement.get() == expectedResults + validProcessedElement == expectedResults where: modelClass | validSystemParticipantResult || expectedResults @@ -89,7 +88,6 @@ class ResultEntityProcessorTest extends Specification { StorageResult | new StorageResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q, soc) || expectedSocResults HpResult | new HpResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q, qDot) || expectedQDotResults EmResult | new EmResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q) || expectedStandardResults - } def "A ResultEntityProcessor should throw an exception if the provided class is not registered"() { @@ -124,9 +122,7 @@ class ResultEntityProcessorTest extends Specification { def validProcessedElement = sysPartResProcessor.handleEntity(validResult) then: - validProcessedElement.present - validProcessedElement.get() == 
expectedResults - + validProcessedElement == expectedResults } def "A ResultEntityProcessor should serialize a FlexOptionsResult correctly"() { @@ -153,9 +149,7 @@ class ResultEntityProcessorTest extends Specification { def validProcessedElement = sysPartResProcessor.handleEntity(validResult) then: - validProcessedElement.present - validProcessedElement.get() == expectedResults - + validProcessedElement == expectedResults } @Shared @@ -229,8 +223,7 @@ class ResultEntityProcessorTest extends Specification { def validProcessedElement = sysPartResProcessor.handleEntity(validResult) then: - validProcessedElement.present - validProcessedElement.get() == expectedResults + validProcessedElement == expectedResults where: modelClass | validConnectorResult || expectedResults @@ -261,9 +254,7 @@ class ResultEntityProcessorTest extends Specification { def validProcessedElement = sysPartResProcessor.handleEntity(validResult) then: - validProcessedElement.present - validProcessedElement.get() == expectedResults - + validProcessedElement == expectedResults } def "A ResultEntityProcessor should throw an EntityProcessorException when it receives an entity result that is not eligible"() { @@ -280,7 +271,6 @@ class ResultEntityProcessorTest extends Specification { EntityProcessorException exception = thrown() exception.message == "Cannot process InvalidTestResult.class with this EntityProcessor. " + "Please either provide an element of LoadResult.class or create a new processor for InvalidTestResult.class!" 
- } def "The list of eligible entity classes for a ResultEntityProcessor should be valid"() { @@ -310,6 +300,5 @@ class ResultEntityProcessorTest extends Specification { super(uuid, time, inputModel) } } - } diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index ccd738a5e..62491ebc6 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -8,6 +8,8 @@ package edu.ie3.datamodel.io.sink import edu.ie3.datamodel.models.result.system.EmResult import edu.ie3.datamodel.models.result.system.FlexOptionsResult +import java.nio.file.Path + import static edu.ie3.util.quantities.PowerSystemUnits.KILOVOLTAMPERE import static tech.units.indriya.unit.Units.PERCENT import static edu.ie3.util.quantities.PowerSystemUnits.DEGREE_GEOM @@ -62,43 +64,16 @@ import javax.measure.quantity.Power class CsvFileSinkTest extends Specification implements TimeSeriesTestData { @Shared - String testBaseFolderPath = "test" + Path testBaseFolderPath = Path.of("test") // called automatically by spock (see http://spockframework.org/spock/docs/1.0/spock_primer.html - Fixture Methods) def cleanup() { // delete files after each test if they exist - if (new File(testBaseFolderPath).exists()) { + if (testBaseFolderPath.toFile().exists()) { FileIOUtils.deleteRecursively(testBaseFolderPath) } } - def "A valid CsvFileSink called by simple constructor should not initialize files by default and consist of several default values"() { - given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath) - csvFileSink.shutdown() - - expect: - !new File(testBaseFolderPath).exists() - csvFileSink.csvSep == "," - } - - def "A valid CsvFileSink with 'initFiles' enabled should create files as expected"() { - given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, - new ProcessorProvider([ - new 
ResultEntityProcessor(PvResult), - new ResultEntityProcessor(EvResult) - ], [] as Map), - new FileNamingStrategy(), - true, - ",") - csvFileSink.shutdown() - - expect: - new File(testBaseFolderPath).exists() - new File(testBaseFolderPath + File.separator + "ev_res.csv").exists() - new File(testBaseFolderPath + File.separator + "pv_res.csv").exists() - } def "A valid CsvFileSink is able to convert an entity data map correctly to RFC 4180 compliant strings"() { given: @@ -140,7 +115,7 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { csvFileSink.shutdown() } - def "A valid CsvFileSink without 'initFiles' should only persist provided elements correctly but not init all files"() { + def "A valid CsvFileSink should persist provided elements correctly"() { given: CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, new ProcessorProvider([ @@ -166,7 +141,6 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { new InputEntityProcessor(EmInput) ], [] as Map), new FileNamingStrategy(), - false, ",") UUID uuid = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") @@ -202,27 +176,27 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { csvFileSink.shutdown() then: - new File(testBaseFolderPath).exists() - new File(testBaseFolderPath + File.separator + "wec_res.csv").exists() - new File(testBaseFolderPath + File.separator + "pv_res.csv").exists() - new File(testBaseFolderPath + File.separator + "evcs_res.csv").exists() - new File(testBaseFolderPath + File.separator + "em_res.csv").exists() - new File(testBaseFolderPath + File.separator + "flex_options_res.csv").exists() - new File(testBaseFolderPath + File.separator + "transformer_2_w_type_input.csv").exists() - new File(testBaseFolderPath + File.separator + "node_input.csv").exists() - new File(testBaseFolderPath + File.separator + "transformer_2_w_input.csv").exists() - new File(testBaseFolderPath + File.separator + 
"operator_input.csv").exists() - new File(testBaseFolderPath + File.separator + "cylindrical_storage_input.csv").exists() - new File(testBaseFolderPath + File.separator + "line_graphic_input.csv").exists() - new File(testBaseFolderPath + File.separator + "line_input.csv").exists() - new File(testBaseFolderPath + File.separator + "operator_input.csv").exists() - new File(testBaseFolderPath + File.separator + "node_graphic_input.csv").exists() - new File(testBaseFolderPath + File.separator + "thermal_bus_input.csv").exists() - new File(testBaseFolderPath + File.separator + "thermal_house_input.csv").exists() - new File(testBaseFolderPath + File.separator + "load_input.csv").exists() - new File(testBaseFolderPath + File.separator + "em_input.csv").exists() - - !new File(testBaseFolderPath + File.separator + "ev_res.csv").exists() + testBaseFolderPath.toFile().exists() + testBaseFolderPath.resolve("wec_res.csv").toFile().exists() + testBaseFolderPath.resolve("pv_res.csv").toFile().exists() + testBaseFolderPath.resolve("evcs_res.csv").toFile().exists() + testBaseFolderPath.resolve("em_res.csv").toFile().exists() + testBaseFolderPath.resolve("flex_options_res.csv").toFile().exists() + testBaseFolderPath.resolve("transformer_2_w_type_input.csv").toFile().exists() + testBaseFolderPath.resolve("node_input.csv").toFile().exists() + testBaseFolderPath.resolve("transformer_2_w_input.csv").toFile().exists() + testBaseFolderPath.resolve("operator_input.csv").toFile().exists() + testBaseFolderPath.resolve("cylindrical_storage_input.csv").toFile().exists() + testBaseFolderPath.resolve("line_graphic_input.csv").toFile().exists() + testBaseFolderPath.resolve("line_input.csv").toFile().exists() + testBaseFolderPath.resolve("operator_input.csv").toFile().exists() + testBaseFolderPath.resolve("node_graphic_input.csv").toFile().exists() + testBaseFolderPath.resolve("thermal_bus_input.csv").toFile().exists() + testBaseFolderPath.resolve("thermal_house_input.csv").toFile().exists() + 
testBaseFolderPath.resolve("load_input.csv").toFile().exists() + testBaseFolderPath.resolve("em_input.csv").toFile().exists() + + !testBaseFolderPath.resolve("ev_res.csv").toFile().exists() } def "A valid CsvFileSink should persist a time series correctly"() { @@ -237,7 +211,6 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, new ProcessorProvider([], timeSeriesProcessorMap), new FileNamingStrategy(), - false, ",") when: @@ -245,8 +218,8 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { csvFileSink.shutdown() then: - new File(testBaseFolderPath).exists() - new File(testBaseFolderPath + File.separator + "its_c_a4bbcb77-b9d0-4b88-92be-b9a14a3e332b.csv").exists() + testBaseFolderPath.toFile().exists() + testBaseFolderPath.resolve("its_c_a4bbcb77-b9d0-4b88-92be-b9a14a3e332b.csv").toFile().exists() } def "A valid CsvFileSink persists a bunch of time series correctly"() { @@ -258,15 +231,15 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { csvFileSink.shutdown() then: - new File(testBaseFolderPath).exists() - new File(testBaseFolderPath + File.separator + "its_h_3c0ebc06-9bd7-44ea-a347-0c52d3dec854.csv").exists() - new File(testBaseFolderPath + File.separator + "its_p_b3d93b08-4985-41a6-b063-00f934a10b28.csv").exists() - new File(testBaseFolderPath + File.separator + "its_pq_7d085fc9-be29-4218-b768-00f885be066b.csv").exists() - new File(testBaseFolderPath + File.separator + "its_ph_56c20b88-c001-4225-8dac-cd13a75c6b48.csv").exists() - new File(testBaseFolderPath + File.separator + "its_pqh_83b577cc-06b1-47a1-bfff-ad648a00784b.csv").exists() - new File(testBaseFolderPath + File.separator + "its_c_a4bbcb77-b9d0-4b88-92be-b9a14a3e332b.csv").exists() - new File(testBaseFolderPath + File.separator + "lpts_g2_b56853fe-b800-4c18-b324-db1878b22a28.csv").exists() - new File(testBaseFolderPath + File.separator + 
"its_weather_4fcbdfcd-4ff0-46dd-b0df-f3af7ae3ed98.csv").exists() + testBaseFolderPath.toFile().exists() + testBaseFolderPath.resolve("its_h_3c0ebc06-9bd7-44ea-a347-0c52d3dec854.csv").toFile().exists() + testBaseFolderPath.resolve("its_p_b3d93b08-4985-41a6-b063-00f934a10b28.csv").toFile().exists() + testBaseFolderPath.resolve("its_pq_7d085fc9-be29-4218-b768-00f885be066b.csv").toFile().exists() + testBaseFolderPath.resolve("its_ph_56c20b88-c001-4225-8dac-cd13a75c6b48.csv").toFile().exists() + testBaseFolderPath.resolve("its_pqh_83b577cc-06b1-47a1-bfff-ad648a00784b.csv").toFile().exists() + testBaseFolderPath.resolve("its_c_a4bbcb77-b9d0-4b88-92be-b9a14a3e332b.csv").toFile().exists() + testBaseFolderPath.resolve("lpts_g2_b56853fe-b800-4c18-b324-db1878b22a28.csv").toFile().exists() + testBaseFolderPath.resolve("its_weather_4fcbdfcd-4ff0-46dd-b0df-f3af7ae3ed98.csv").toFile().exists() } def "A valid CsvFileSink is able to persist an InputEntity without persisting the nested elements"() { @@ -294,9 +267,9 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { csvFileSink.persistIgnoreNested(nestedInput) then: - new File(testBaseFolderPath).exists() - new File(testBaseFolderPath + File.separator + "pv_input.csv").exists() - !(new File(testBaseFolderPath + File.separator + "node_input.csv").exists()) + testBaseFolderPath.toFile().exists() + testBaseFolderPath.resolve("pv_input.csv").toFile().exists() + !testBaseFolderPath.resolve("node_input.csv").toFile().exists() cleanup: csvFileSink.shutdown() @@ -311,14 +284,13 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { ProcessorProvider.allEntityProcessors(), new HashMap, Value>, TimeSeriesEntry, Value>>()), new FileNamingStrategy(), - false, ",") when: csvFileSink.persist(individualEnergyPriceTimeSeries) then: - !(new File(testBaseFolderPath + File.separator + "its_a4bbcb77-b9d0-4b88-92be-b9a14a3e332b.csv").exists()) + 
!testBaseFolderPath.resolve("its_a4bbcb77-b9d0-4b88-92be-b9a14a3e332b.csv").toFile().exists() cleanup: csvFileSink.shutdown() @@ -331,23 +303,22 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { testBaseFolderPath, new ProcessorProvider(), new FileNamingStrategy(), - false, ",") when: csvFileSink.persistJointGrid(SampleJointGrid.grid()) then: - new File(testBaseFolderPath + File.separator + "line_input.csv").exists() - new File(testBaseFolderPath + File.separator + "line_type_input.csv").exists() - new File(testBaseFolderPath + File.separator + "load_input.csv").exists() - new File(testBaseFolderPath + File.separator + "node_input.csv").exists() - new File(testBaseFolderPath + File.separator + "operator_input.csv").exists() - new File(testBaseFolderPath + File.separator + "pv_input.csv").exists() - new File(testBaseFolderPath + File.separator + "storage_input.csv").exists() - new File(testBaseFolderPath + File.separator + "storage_type_input.csv").exists() - new File(testBaseFolderPath + File.separator + "transformer_2_w_input.csv").exists() - new File(testBaseFolderPath + File.separator + "transformer_2_w_type_input.csv").exists() + testBaseFolderPath.resolve("line_input.csv").toFile().exists() + testBaseFolderPath.resolve("line_type_input.csv").toFile().exists() + testBaseFolderPath.resolve("load_input.csv").toFile().exists() + testBaseFolderPath.resolve( "node_input.csv").toFile().exists() + testBaseFolderPath.resolve("operator_input.csv").toFile().exists() + testBaseFolderPath.resolve("pv_input.csv").toFile().exists() + testBaseFolderPath.resolve("storage_input.csv").toFile().exists() + testBaseFolderPath.resolve("storage_type_input.csv").toFile().exists() + testBaseFolderPath.resolve("transformer_2_w_input.csv").toFile().exists() + testBaseFolderPath.resolve("transformer_2_w_type_input.csv").toFile().exists() cleanup: csvFileSink.shutdown() diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/InfluxDbSinkIT.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/sink/InfluxDbSinkIT.groovy index 811eb736d..4b5c57905 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/InfluxDbSinkIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/InfluxDbSinkIT.groovy @@ -61,7 +61,7 @@ class InfluxDbSinkIT extends Specification { when: def connector = new InfluxDbConnector(influxDbContainer.url,"test_weather", "test_scenario") then: - connector.connectionValid + connector.isConnectionValid() } def "An InfluxDbSink can persist a ResultEntity"() { diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/EntitySourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/EntitySourceTest.groovy new file mode 100644 index 000000000..b2881dbdf --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/EntitySourceTest.groovy @@ -0,0 +1,106 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source + +import edu.ie3.datamodel.io.factory.input.ThermalBusInputFactory +import edu.ie3.datamodel.io.naming.FileNamingStrategy +import edu.ie3.datamodel.io.source.csv.CsvDataSource +import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.models.input.thermal.ThermalBusInput +import edu.ie3.test.common.SystemParticipantTestData as sptd +import edu.ie3.test.common.GridTestData as gtd + +import spock.lang.Shared +import spock.lang.Specification + +import java.nio.file.Path + +class EntitySourceTest extends Specification { + + private final class DummyEntitySource extends EntitySource { + DummyEntitySource(CsvDataSource dataSource) { + this.dataSource = dataSource + } + } + + @Shared + String csvSep = "," + @Shared + Path testBaseFolderPath = Path.of("testBaseFolderPath") // does not have to exist for this test + @Shared + FileNamingStrategy fileNamingStrategy = new FileNamingStrategy() + + CsvDataSource csvDataSource = new 
CsvDataSource(csvSep, testBaseFolderPath, fileNamingStrategy) + + DummyEntitySource dummyEntitySource = new DummyEntitySource(csvDataSource) + + def "A csv data source is able to find the correct first entity by uuid"() { + given: + def uuid = UUID.randomUUID() + def queriedOperator = new OperatorInput(uuid, "b") + def entities = Arrays.asList( + new OperatorInput(UUID.randomUUID(), "a"), + queriedOperator, + new OperatorInput(UUID.randomUUID(), "c") + ) + + when: + def actual = dummyEntitySource.findFirstEntityByUuid(uuid.toString(), entities) + + then: + actual.present + actual.get() == queriedOperator + } + + def "A CsvDataSource should always return an operator. Either the found one (if any) or OperatorInput.NO_OPERATOR_ASSIGNED"() { + + expect: + dummyEntitySource.getFirstOrDefaultOperator(operators, operatorUuid, entityClassName, requestEntityUuid) == expectedOperator + + where: + operatorUuid | operators | entityClassName | requestEntityUuid || expectedOperator + "8f9682df-0744-4b58-a122-f0dc730f6510" | [sptd.hpInput.operator] | "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || sptd.hpInput.operator + "8f9682df-0744-4b58-a122-f0dc730f6520" | [sptd.hpInput.operator] | "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || OperatorInput.NO_OPERATOR_ASSIGNED + "8f9682df-0744-4b58-a122-f0dc730f6510" | [] | "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || OperatorInput.NO_OPERATOR_ASSIGNED + } + + def "A CsvDataSource should be able to handle the extraction process of an asset type correctly"() { + when: + def assetTypeOpt = dummyEntitySource.getAssetType(types, fieldsToAttributes, "TestClassName") + + then: + assetTypeOpt.data.present == resultIsPresent + assetTypeOpt.data.ifPresent({ assetType -> + assert (assetType == resultData) + }) + + where: + types | fieldsToAttributes || resultIsPresent || resultData + [] | ["type": "202069a7-bcf8-422c-837c-273575220c8a"] || false || null + [] | ["bla": "foo"] || false || null + 
[gtd.transformerTypeBtoD] | ["type": "202069a7-bcf8-422c-837c-273575220c8a"] || true || gtd.transformerTypeBtoD + [sptd.chpTypeInput] | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8"] || true || sptd.chpTypeInput + } + + def "A CsvDataSource should not throw an exception but assume NO_OPERATOR_ASSIGNED if the operator field is missing in the headline"() { + + given: + def thermalBusInputFieldsToAttributesMap = [ + "uuid" : "0d95d7f2-49fb-4d49-8636-383a5220384e", + "id" : "test_thermalBusInput", + "operatesuntil": "2020-03-25T15:11:31Z[UTC]", + "operatesfrom" : "2020-03-24T15:11:31Z[UTC]" + ] + + when: + def thermalBusInputEntity = new ThermalBusInputFactory().get(dummyEntitySource.assetInputEntityDataStream(ThermalBusInput, thermalBusInputFieldsToAttributesMap, Collections.emptyList())) + + then: + noExceptionThrown() // no NPE should be thrown + thermalBusInputEntity.success + thermalBusInputEntity.data.get().operator.id == OperatorInput.NO_OPERATOR_ASSIGNED.id // operator id should be set accordingly + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/IdCoordinateSourceMock.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/IdCoordinateSourceMock.groovy new file mode 100644 index 000000000..0e2e4a8da --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/IdCoordinateSourceMock.groovy @@ -0,0 +1,45 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source + +import edu.ie3.util.geo.CoordinateDistance +import org.locationtech.jts.geom.Point +import tech.units.indriya.ComparableQuantity + +import javax.measure.quantity.Length + +class IdCoordinateSourceMock implements IdCoordinateSource { + + @Override + Optional getCoordinate(int id) { + return Optional.empty() + } + + @Override + Collection getCoordinates(int ... 
ids) { + return Collections.emptyList() + } + + @Override + Optional getId(Point coordinate) { + return Optional.empty() + } + + @Override + Collection getAllCoordinates() { + return Collections.emptyList() + } + + @Override + List getNearestCoordinates(Point coordinate, int n) { + return Collections.emptyList() + } + + @Override + List getClosestCoordinates(Point coordinate, int n, ComparableQuantity distance) { + return Collections.emptyList() + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/IdCoordinateSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/IdCoordinateSourceTest.groovy new file mode 100644 index 000000000..833a47c48 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/IdCoordinateSourceTest.groovy @@ -0,0 +1,74 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source + +import edu.ie3.util.geo.CoordinateDistance +import edu.ie3.util.geo.GeoUtils +import org.locationtech.jts.geom.Point +import spock.lang.Specification + +class IdCoordinateSourceTest extends Specification { + private final IdCoordinateSourceMock coordinateSourceMock = new IdCoordinateSourceMock() + + private final Point point0 = GeoUtils.buildPoint(52.5, 7.5) + private final Point point1 = GeoUtils.buildPoint(53, 8) + private final Point point2 = GeoUtils.buildPoint(53, 7) + private final Point point3 = GeoUtils.buildPoint(53, 6) + private final Point point4 = GeoUtils.buildPoint(52, 8) + private final Point point5 = GeoUtils.buildPoint(52, 7) + private final Point point6 = GeoUtils.buildPoint(52, 6) + private final Point point7 = GeoUtils.buildPoint(51, 8) + private final Point point8 = GeoUtils.buildPoint(51, 7) + private final Point point9 = GeoUtils.buildPoint(51, 6) + + private final List points = [ + point1, + point2, + point3, + point4, + point5, + point6, + point7, + point8, + 
point9 + ] + + def "IdCoordinateSource should return correct number of corner points restricted to the bounding box"() { + given: + List expectedPoints = [ + point2, + point4, + point5, + point6, + point8 + ] + + when: + List distances = coordinateSourceMock.calculateCoordinateDistances(point0, 9, points) + List result = coordinateSourceMock.restrictToBoundingBox(point0, distances, 4) + + then: + for (CoordinateDistance value: result) { + expectedPoints.contains(value.coordinateB) + } + } + + def "IdCoordinateSource should return only one point of the bounding box if the starting coordinate exactly matched the found coordinate"() { + given: + Point matchingPoint = GeoUtils.buildPoint(52.5, 7.5) + + when: + List withExactMatch = new ArrayList<>(points) + withExactMatch.addAll(matchingPoint) + + List distances = coordinateSourceMock.calculateCoordinateDistances(point0, 9, withExactMatch) + List result = coordinateSourceMock.restrictToBoundingBox(point0, distances, 4) + + then: + result.size() == 1 + result.get(0).coordinateB == matchingPoint + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy index b4a77c280..e6c58dd97 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy @@ -6,16 +6,14 @@ package edu.ie3.datamodel.io.source.csv import edu.ie3.datamodel.io.naming.FileNamingStrategy -import edu.ie3.datamodel.io.factory.input.ThermalBusInputFactory import edu.ie3.datamodel.models.UniqueEntity import edu.ie3.datamodel.models.input.NodeInput -import edu.ie3.datamodel.models.input.OperatorInput -import edu.ie3.datamodel.models.input.thermal.ThermalBusInput -import edu.ie3.test.common.GridTestData as gtd import edu.ie3.test.common.SystemParticipantTestData as sptd + import spock.lang.Shared import spock.lang.Specification +import java.nio.file.Path 
import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.LongAdder import java.util.function.Function @@ -31,7 +29,7 @@ class CsvDataSourceTest extends Specification { // methods in a public or protected method makes them available for testing private final class DummyCsvSource extends CsvDataSource { - DummyCsvSource(String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { + DummyCsvSource(String csvSep, Path folderPath, FileNamingStrategy fileNamingStrategy) { super(csvSep, folderPath, fileNamingStrategy) } @@ -40,11 +38,6 @@ class CsvDataSourceTest extends Specification { return super.buildFieldsToAttributes(csvRow, headline) } - OperatorInput getFirstOrDefaultOperator( - Collection operators, String operatorUuid, String entityClassName, String requestEntityUuid) { - return super.getFirstOrDefaultOperator(operators, operatorUuid, entityClassName, requestEntityUuid) - } - def Set> distinctRowsWithLog( Class entityClass, Collection> allRows) { return super.distinctRowsWithLog(allRows, uuidExtractor, entityClass.simpleName, "UUID") @@ -64,38 +57,19 @@ class CsvDataSourceTest extends Specification { @Shared String csvSep = "," @Shared - String testBaseFolderPath = "testBaseFolderPath" // does not have to exist for this test + Path testBaseFolderPath = Path.of("testBaseFolderPath") // does not have to exist for this test @Shared FileNamingStrategy fileNamingStrategy = new FileNamingStrategy() @Shared DummyCsvSource dummyCsvSource = new DummyCsvSource(csvSep, testBaseFolderPath, fileNamingStrategy) - def "A csv data source is able to find the correct first entity by uuid"() { - given: - def uuid = UUID.randomUUID() - def queriedOperator = new OperatorInput(uuid, "b") - def entities = Arrays.asList( - new OperatorInput(UUID.randomUUID(), "a"), - queriedOperator, - new OperatorInput(UUID.randomUUID(), "c") - ) - - when: - def actual = dummyCsvSource.findFirstEntityByUuid(uuid.toString(), entities) - - then: - actual.present - 
actual.get() == queriedOperator - } - def "A DataSource should contain a valid connector after initialization"() { expect: dummyCsvSource.connector != null dummyCsvSource.connector.baseDirectoryName == testBaseFolderPath dummyCsvSource.connector.fileNamingStrategy == fileNamingStrategy dummyCsvSource.connector.entityWriters.isEmpty() - } def "A CsvDataSource should build a valid fields to attributes map with valid data as expected"() { @@ -127,7 +101,6 @@ class CsvDataSourceTest extends Specification { olmcharacteristic : "olm:{(0.0,1.0)}", cosPhiFixed : "cosPhiFixed:{(0.0,1.0)}" ] - } def "A CsvDataSource should be able to handle deprecated invalid csvRows correctly"() { @@ -358,7 +331,6 @@ class CsvDataSourceTest extends Specification { olmcharacteristic : "olm:{(0.0,1.0)}", cosPhiFixed : "" ] - } def "A CsvDataSource should be able to handle several errors when the csvRow is invalid or cannot be processed"() { @@ -382,20 +354,6 @@ class CsvDataSourceTest extends Specification { "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8;25.0;100.0;0.95;98.0;test_bmTypeInput;50.0;25.0" || "wrong separator" "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput" || "too less columns" "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput,,,," || "too much columns" - - } - - def "A CsvDataSource should always return an operator. 
Either the found one (if any) or OperatorInput.NO_OPERATOR_ASSIGNED"() { - - expect: - dummyCsvSource.getFirstOrDefaultOperator(operators, operatorUuid, entityClassName, requestEntityUuid) == expectedOperator - - where: - operatorUuid | operators | entityClassName | requestEntityUuid || expectedOperator - "8f9682df-0744-4b58-a122-f0dc730f6510" | [sptd.hpInput.operator]| "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || sptd.hpInput.operator - "8f9682df-0744-4b58-a122-f0dc730f6520" | [sptd.hpInput.operator]| "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || OperatorInput.NO_OPERATOR_ASSIGNED - "8f9682df-0744-4b58-a122-f0dc730f6510" | []| "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || OperatorInput.NO_OPERATOR_ASSIGNED - } def "A CsvDataSource should collect be able to collect empty optionals when asked to do so"() { @@ -459,7 +417,6 @@ class CsvDataSourceTest extends Specification { "v_target" : "1.0", "volt_lvl" : "Höchstspannung", "v_rated" : "380"] - } def "A CsvDataSource should return an empty set of csv row mappings if the provided collection of mappings contains duplicated UUIDs with different data"() { @@ -499,43 +456,4 @@ class CsvDataSourceTest extends Specification { then: distinctRows.size() == 0 } - - def "A CsvDataSource should be able to handle the extraction process of an asset type correctly"() { - - when: - def assetTypeOpt = dummyCsvSource.getAssetType(types, fieldsToAttributes, "TestClassName") - - then: - assetTypeOpt.present == resultIsPresent - assetTypeOpt.ifPresent({ assetType -> - assert (assetType == resultData) - }) - - where: - types | fieldsToAttributes || resultIsPresent || resultData - []| ["type": "202069a7-bcf8-422c-837c-273575220c8a"] || false || null - []| ["bla": "foo"] || false || null - [gtd.transformerTypeBtoD]| ["type": "202069a7-bcf8-422c-837c-273575220c8a"] || true || gtd.transformerTypeBtoD - [sptd.chpTypeInput]| ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8"] || true || 
sptd.chpTypeInput - } - - def "A CsvDataSource should not throw an exception but assume NO_OPERATOR_ASSIGNED if the operator field is missing in the headline"() { - - given: - def thermalBusInputFieldsToAttributesMap = [ - "uuid" : "0d95d7f2-49fb-4d49-8636-383a5220384e", - "id" : "test_thermalBusInput", - "operatesuntil": "2020-03-25T15:11:31Z[UTC]", - "operatesfrom" : "2020-03-24T15:11:31Z[UTC]" - ] - - when: - def thermalBusInputEntity = new ThermalBusInputFactory().get(dummyCsvSource.assetInputEntityDataStream(ThermalBusInput, thermalBusInputFieldsToAttributesMap, Collections.emptyList())) - - then: - noExceptionThrown() // no NPE should be thrown - thermalBusInputEntity.present - thermalBusInputEntity.get().operator.id == OperatorInput.NO_OPERATOR_ASSIGNED.id // operator id should be set accordingly - } - } diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy index 763f4988e..38f15b346 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy @@ -5,11 +5,16 @@ */ package edu.ie3.datamodel.io.source.csv +import edu.ie3.datamodel.exceptions.SourceException import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputEntityData import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputEntityData +import edu.ie3.datamodel.io.source.GraphicSource +import edu.ie3.datamodel.io.source.RawGridSource +import edu.ie3.datamodel.io.source.TypeSource import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.GridTestData as gtd import org.locationtech.jts.geom.LineString import org.locationtech.jts.geom.Point @@ -20,65 +25,70 @@ class CsvGraphicSourceTest extends 
Specification implements CsvTestDataMeta { def "A CsvGraphicSource should provide an instance of GraphicElements based on valid input data correctly"() { given: - def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) - def rawGridSource = new CsvRawGridSource(csvSep, gridDefaultFolderPath, fileNamingStrategy, typeSource) - def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, typeSource, rawGridSource) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, fileNamingStrategy)) + def rawGridSource = new RawGridSource(typeSource, new CsvDataSource(csvSep, gridDefaultFolderPath, fileNamingStrategy)) + def csvGraphicSource = new GraphicSource(typeSource, rawGridSource, new CsvDataSource(csvSep, graphicsFolderPath, fileNamingStrategy)) when: - def graphicElementsOpt = csvGraphicSource.getGraphicElements() + def graphicElements = csvGraphicSource.graphicElements then: - graphicElementsOpt.present - graphicElementsOpt.ifPresent({ - assert (it.allEntitiesAsList().size() == 3) - assert (it.nodeGraphics.size() == 2) - assert (it.lineGraphics.size() == 1) - }) + graphicElements.allEntitiesAsList().size() == 3 + graphicElements.nodeGraphics.size() == 2 + graphicElements.lineGraphics.size() == 1 } def "A CsvGraphicSource should process invalid input data as expected when requested to provide an instance of GraphicElements"() { given: - def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, fileNamingStrategy)) def rawGridSource = - new CsvRawGridSource(csvSep, gridDefaultFolderPath, fileNamingStrategy, typeSource) { - @Override - Set getNodes() { - return Collections.emptySet() - } + new RawGridSource(typeSource, new CsvDataSource(csvSep, gridDefaultFolderPath, fileNamingStrategy)) { + @Override + Set getNodes() { + return Collections.emptySet() + } - @Override - Set getNodes(Set operators) { - 
return Collections.emptySet() - } - } + @Override + Set getNodes(Set operators) { + return Collections.emptySet() + } + } - def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, typeSource, rawGridSource) + def csvGraphicSource = new GraphicSource(typeSource, rawGridSource, new CsvDataSource(csvSep, graphicsFolderPath, fileNamingStrategy)) when: - def graphicElementsOpt = csvGraphicSource.getGraphicElements() + def graphicElements = Try.of(() -> csvGraphicSource.graphicElements, SourceException) then: - !graphicElementsOpt.present + graphicElements.failure + graphicElements.data == Optional.empty() + + Exception ex = graphicElements.exception.get() + ex.class == SourceException + ex.message.startsWith("edu.ie3.datamodel.exceptions.FailureException: 2 exception(s) occurred within \"LineInput\" data, one is: edu.ie3.datamodel.exceptions.FactoryException: edu.ie3.datamodel.exceptions.SourceException: Failure due to: Skipping LineInput with uuid") } def "A CsvGraphicSource should read and handle a valid node graphics file as expected"() { given: - def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvRawGridSource)) + def csvGraphicSource = new GraphicSource( + Mock(TypeSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, graphicsFolderPath, fileNamingStrategy)) def expectedNodeGraphicD = new NodeGraphicInput( - gtd.nodeGraphicD.uuid, - gtd.nodeGraphicD.graphicLayer, - gtd.nodeGraphicD.path, - gtd.nodeD, - gtd.geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [7.4116482, 51.4843281] }") as Point - ) + gtd.nodeGraphicD.uuid, + gtd.nodeGraphicD.graphicLayer, + gtd.nodeGraphicD.path, + gtd.nodeD, + gtd.geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [7.4116482, 51.4843281] }") as Point + ) def expectedNodeGraphicC = new NodeGraphicInput( - gtd.nodeGraphicC.uuid, - gtd.nodeGraphicC.graphicLayer, - gtd.geoJsonReader.read("{ \"type\": 
\"LineString\", \"coordinates\": [[7.4116482, 51.4843281], [7.4116482, 51.4843281]]}") as LineString, - gtd.nodeC, - gtd.nodeGraphicC.point - ) + gtd.nodeGraphicC.uuid, + gtd.nodeGraphicC.graphicLayer, + gtd.geoJsonReader.read("{ \"type\": \"LineString\", \"coordinates\": [[7.4116482, 51.4843281], [7.4116482, 51.4843281]]}") as LineString, + gtd.nodeC, + gtd.nodeGraphicC.point + ) when: def nodeGraphics = csvGraphicSource.getNodeGraphicInput([gtd.nodeC, gtd.nodeD] as Set) @@ -93,7 +103,10 @@ class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { def "A CsvGraphicSource should read and handle a valid line graphics file as expected"() { given: - def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvRawGridSource)) + def csvGraphicSource = new GraphicSource( + Mock(TypeSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, graphicsFolderPath, fileNamingStrategy)) when: def lineGraphics = csvGraphicSource.getLineGraphicInput([gtd.lineCtoD] as Set) @@ -105,7 +118,10 @@ class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { def "A CsvGraphicSource should build node graphic entity data from valid and invalid input data correctly"() { given: - def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvRawGridSource)) + def csvGraphicSource = new GraphicSource( + Mock(TypeSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, graphicsFolderPath, fileNamingStrategy)) def fieldsToAttributesMap = [ "uuid" : "09aec636-791b-45aa-b981-b14edf171c4c", "graphic_layer": "main", @@ -116,9 +132,11 @@ class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { expect: def res = csvGraphicSource.buildNodeGraphicEntityData(fieldsToAttributesMap, nodeCollection as Set) - res.present == isPresent + res.success == isPresent + + if (isPresent) { + def value = res.data.get() - res.ifPresent({ 
value -> assert value == new NodeGraphicInputEntityData([ "uuid" : "09aec636-791b-45aa-b981-b14edf171c4c", "graphic_layer": "main", @@ -126,20 +144,21 @@ class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { "point" : "{\"type\":\"Point\",\"coordinates\":[0.0,10],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}" ], gtd.nodeC) assert value.node == gtd.nodeC - }) - + } where: nodeCollection || isPresent []|| false // no nodes provide [gtd.nodeA, gtd.nodeB]|| false // node cannot be found [gtd.nodeC]|| true // node found - } def "A CsvGraphicSource should build line graphic entity data from valid and invalid input data correctly"() { given: - def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvRawGridSource)) + def csvGraphicSource = new GraphicSource( + Mock(TypeSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, graphicsFolderPath, fileNamingStrategy)) def fieldsToAttributesMap = [ "uuid" : "ece86139-3238-4a35-9361-457ecb4258b0", "graphic_layer": "main", @@ -149,16 +168,18 @@ class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { expect: def res = csvGraphicSource.buildLineGraphicEntityData(fieldsToAttributesMap, nodeCollection as Set) - res.present == isPresent + res.success == isPresent + + if (isPresent) { + def value = res.data.get() - res.ifPresent({ value -> assert value == new LineGraphicInputEntityData(["uuid" : "ece86139-3238-4a35-9361-457ecb4258b0", "graphic_layer": "main", "path" : "{\"type\":\"LineString\",\"coordinates\":[[0.0,0.0],[0.0,10]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}" ] , gtd.lineAtoB) assert value.line == gtd.lineAtoB - }) + } where: @@ -166,6 +187,5 @@ class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { []|| false // no nodes provide [gtd.lineCtoD]|| false // line cannot be found [gtd.lineAtoB]|| true // line found - } -} +} \ No newline at 
end of file diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSourceCosmoIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSourceCosmoIT.groovy index 8dc676837..0a295421c 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSourceCosmoIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSourceCosmoIT.groovy @@ -10,6 +10,8 @@ import edu.ie3.util.geo.CoordinateDistance import edu.ie3.util.geo.GeoUtils import spock.lang.Shared import spock.lang.Specification +import tech.units.indriya.quantity.Quantities +import tech.units.indriya.unit.Units import java.util.stream.Collectors import java.util.stream.Stream @@ -20,7 +22,7 @@ class CsvIdCoordinateSourceCosmoIT extends Specification implements CsvTestDataM CsvIdCoordinateSource source def setupSpec() { - source = new CsvIdCoordinateSource(csvSep, coordinatesCosmoFolderPath, fileNamingStrategy, new CosmoIdCoordinateFactory()) + source = new CsvIdCoordinateSource(new CosmoIdCoordinateFactory(), new CsvDataSource(csvSep, coordinatesCosmoFolderPath, fileNamingStrategy)) } def "The CsvCoordinateSource is able to create a valid stream from a coordinate file"() { @@ -132,18 +134,17 @@ class CsvIdCoordinateSourceCosmoIT extends Specification implements CsvTestDataM ].sort() when: - def actualDistances = source.getNearestCoordinates(basePoint, 2, allCoordinates) + def actualDistances = source.calculateCoordinateDistances(basePoint, 2, allCoordinates) then: actualDistances == expectedDistances } - def "If no collection is given, the CsvIdCoordinateSource is able to return the nearest n coordinates of all available coordinates" () { + def "The CsvIdCoordinateSource will return the nearest n coordinates" () { given: - def n = 2 - def allCoordinates = source.allCoordinates + def n = 5 def basePoint = GeoUtils.buildPoint(39.617162, 1.438029) - def expectedDistances = source.getNearestCoordinates(basePoint, n, allCoordinates) + def 
expectedDistances = source.calculateCoordinateDistances(basePoint, n, source.allCoordinates) when: def actualDistances = source.getNearestCoordinates(basePoint, n) @@ -151,4 +152,41 @@ class CsvIdCoordinateSourceCosmoIT extends Specification implements CsvTestDataM then: actualDistances == expectedDistances } + + def "The CsvIdCoordinateSource will return no coordinates if no coordinates are in the given radius" () { + given: + def n = 5 + def basePoint = GeoUtils.buildPoint(37.617162, 1.438029) + def distance = Quantities.getQuantity(100, Units.METRE) + + when: + def actualDistances = source.getClosestCoordinates(basePoint, n, distance) + + then: + actualDistances.empty + } + + def "The CsvIdCoordinateSource will return the nearest n coordinates if n coordinates are in the search radius"() { + given: + def basePoint = GeoUtils.buildPoint(39.617162, 1.438029) + def distance = Quantities.getQuantity(10000, Units.METRE) + + when: + def actualDistances = source.getClosestCoordinates(basePoint, 3, distance) + + then: + actualDistances.size() == 3 + } + + def "The CsvIdCoordinateSource will return the nearest m coordinates if less than n coordinates are in the given radius"() { + given: + def basePoint = GeoUtils.buildPoint(39.617162, 1.438029) + def distance = Quantities.getQuantity(1000, Units.METRE) + + when: + def actualDistances = source.getClosestCoordinates(basePoint, 2, distance) + + then: + actualDistances.size() == 1 + } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSourceIconIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSourceIconIT.groovy index 6193c477f..484d3a4ba 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSourceIconIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSourceIconIT.groovy @@ -10,6 +10,8 @@ import edu.ie3.util.geo.CoordinateDistance import edu.ie3.util.geo.GeoUtils import spock.lang.Shared import spock.lang.Specification 
+import tech.units.indriya.quantity.Quantities +import tech.units.indriya.unit.Units import java.util.stream.Collectors import java.util.stream.Stream @@ -20,7 +22,7 @@ class CsvIdCoordinateSourceIconIT extends Specification implements CsvTestDataMe CsvIdCoordinateSource source def setupSpec() { - source = new CsvIdCoordinateSource(csvSep, coordinatesIconFolderPath, fileNamingStrategy, new IconIdCoordinateFactory()) + source = new CsvIdCoordinateSource(new IconIdCoordinateFactory(), new CsvDataSource(csvSep, coordinatesIconFolderPath, fileNamingStrategy)) } def "The CsvCoordinateSource is able to create a valid stream from a coordinate file"() { @@ -133,18 +135,17 @@ class CsvIdCoordinateSourceIconIT extends Specification implements CsvTestDataMe ].sort() when: - def actualDistances = source.getNearestCoordinates(basePoint, 2, allCoordinates) + def actualDistances = source.calculateCoordinateDistances(basePoint, 2, allCoordinates) then: actualDistances == expectedDistances } - def "If no collection is given, the CsvIdCoordinateSource is able to return the nearest n coordinates of all available coordinates" () { + def "The CsvIdCoordinateSource will return the nearest n coordinates" () { given: - def n = 2 - def allCoordinates = source.allCoordinates + def n = 5 def basePoint = GeoUtils.buildPoint(39.617162, 1.438029) - def expectedDistances = source.getNearestCoordinates(basePoint, n, allCoordinates) + def expectedDistances = source.calculateCoordinateDistances(basePoint, n, source.allCoordinates) when: def actualDistances = source.getNearestCoordinates(basePoint, n) @@ -152,4 +153,41 @@ class CsvIdCoordinateSourceIconIT extends Specification implements CsvTestDataMe then: actualDistances == expectedDistances } + + def "The CsvIdCoordinateSource will return no coordinates if no coordinates are in the given radius" () { + given: + def n = 5 + def basePoint = GeoUtils.buildPoint(39.617162, 1.438029) + def distance = Quantities.getQuantity(10000, Units.METRE) + + 
when: + def actualDistances = source.getClosestCoordinates(basePoint, n, distance) + + then: + actualDistances.empty + } + + def "The CsvIdCoordinateSource will return the nearest n coordinates if n coordinates are in the search radius"() { + given: + def basePoint = GeoUtils.buildPoint(51.5, 7.38) + def distance = Quantities.getQuantity(10000, Units.METRE) + + when: + def actualDistances = source.getClosestCoordinates(basePoint, 3, distance) + + then: + actualDistances.size() == 3 + } + + def "The CsvIdCoordinateSource will return the nearest m coordinates if less than n coordinates are in the given radius"() { + given: + def basePoint = GeoUtils.buildPoint(51.5, 7.38) + def distance = Quantities.getQuantity(1000, Units.METRE) + + when: + def actualDistances = source.getClosestCoordinates(basePoint, 3, distance) + + then: + actualDistances.size() == 1 + } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvRawGridSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvRawGridSourceTest.groovy index 32a9975f9..5049b4bb7 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvRawGridSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvRawGridSourceTest.groovy @@ -5,17 +5,20 @@ */ package edu.ie3.datamodel.io.source.csv +import edu.ie3.datamodel.exceptions.SourceException import edu.ie3.datamodel.io.factory.input.AssetInputEntityData import edu.ie3.datamodel.io.factory.input.ConnectorInputEntityData import edu.ie3.datamodel.io.factory.input.Transformer3WInputEntityData import edu.ie3.datamodel.io.factory.input.TypedConnectorInputEntityData +import edu.ie3.datamodel.io.source.RawGridSource +import edu.ie3.datamodel.io.source.TypeSource import edu.ie3.datamodel.models.input.connector.LineInput import edu.ie3.datamodel.models.input.connector.SwitchInput import edu.ie3.datamodel.models.input.connector.Transformer3WInput import edu.ie3.datamodel.models.input.container.RawGridElements +import 
edu.ie3.datamodel.utils.Try import edu.ie3.test.common.GridTestData import edu.ie3.test.common.GridTestData as rgtd - import spock.lang.Shared import spock.lang.Specification @@ -24,33 +27,33 @@ import java.util.stream.Stream class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { @Shared - CsvRawGridSource source + RawGridSource source def setupSpec() { - CsvTypeSource typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) - source = new CsvRawGridSource(csvSep, gridDefaultFolderPath, fileNamingStrategy, typeSource) + TypeSource typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, fileNamingStrategy)) + source = new RawGridSource(typeSource, new CsvDataSource(csvSep, gridDefaultFolderPath, fileNamingStrategy)) } def "The CsvRawGridSource is able to convert single valid AssetInputEntityData to ConnectorInputEntityData"() { given: "valid input data" def fieldsToAttributes = [ "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", - "id" : "test_switch_AtoB", + "id" : "test_switch_AtoB", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", "operatesFrom" : "2020-03-24 15:11:31", "operatesUntil" : "2020-03-24 15:11:31", "nodeA" : "4ca90220-74c2-4369-9afa-a18bf068840d", "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", - "closed" : "true" + "closed" : "true" ] def expectedFieldsToAttributes = [ "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", - "id" : "test_switch_AtoB", + "id" : "test_switch_AtoB", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", "operatesFrom" : "2020-03-24 15:11:31", "operatesUntil" : "2020-03-24 15:11:31", - "closed" : "true" + "closed" : "true" ] def validAssetEntityInputData = new AssetInputEntityData(fieldsToAttributes, SwitchInput) @@ -61,8 +64,8 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { def connectorDataOption = source.buildUntypedConnectorInputEntityData(validAssetEntityInputData, nodes) then: "everything is fine" - connectorDataOption.present - 
connectorDataOption.get().with { + connectorDataOption.success + connectorDataOption.data.get().with { assert fieldsToValues == expectedFieldsToAttributes assert targetClass == SwitchInput assert nodeA == rgtd.nodeA @@ -91,42 +94,42 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { def connectorDataOption = source.buildUntypedConnectorInputEntityData(validAssetEntityInputData, nodes) then: "it returns en empty Optional" - !connectorDataOption.present + connectorDataOption.failure } def "The CsvRawGridSource is able to convert a stream of valid AssetInputEntityData to ConnectorInputEntityData"() { given: "valid input data" def validStream = Stream.of( - new AssetInputEntityData([ - "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", - "id" : "test_switch_AtoB", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", - "operatesFrom" : "2020-03-24 15:11:31", - "operatesUntil" : "2020-03-24 15:11:31", - "nodeA" : "4ca90220-74c2-4369-9afa-a18bf068840d", - "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", - "closed" : "true" - ], SwitchInput), - new AssetInputEntityData([ - "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "id" : "test_lineCtoD", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", - "operatesFrom" : "2020-03-24 15:11:31", - "operatesUntil" : "2020-03-24 15:11:31", - "nodeA" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", - "nodeB" : "6e0980e0-10f2-4e18-862b-eb2b7c90509b", - "parallelDevices" : "2", - "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", - "length" : "0.003", - "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", - "olmCharacteristic" : "olm:{(0.0,1.0)}" - ], - LineInput) - ) + new AssetInputEntityData([ + "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", + "id" : "test_switch_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeA" : 
"4ca90220-74c2-4369-9afa-a18bf068840d", + "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", + "closed" : "true" + ], SwitchInput), + new AssetInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeA" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", + "nodeB" : "6e0980e0-10f2-4e18-862b-eb2b7c90509b", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput) + ) def expectedSet = [ - Optional.of(new ConnectorInputEntityData([ + new ConnectorInputEntityData([ "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", "id" : "test_switch_AtoB", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", @@ -137,8 +140,8 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { SwitchInput, rgtd.nodeA, rgtd.nodeB - )), - Optional.of(new ConnectorInputEntityData([ + ), + new ConnectorInputEntityData([ "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", "id" : "test_lineCtoD", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", @@ -153,7 +156,7 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { LineInput, rgtd.nodeC, rgtd.nodeD - )) + ) ] as Set def nodes = [ @@ -168,7 +171,11 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { then: "everything is fine" actualSet.size() == expectedSet.size() - actualSet.containsAll(expectedSet) + actualSet.every { + it.success + } + + actualSet.stream().map { it.data.get() }.toList().containsAll(expectedSet) } def "The CsvRawGridSource is able to add a type to untyped ConnectorInputEntityData correctly"() { @@ -233,7 +240,7 @@ class CsvRawGridSourceTest extends Specification implements 
CsvTestDataMeta { rgtd.nodeD ) - def expectedTypedEntityData = Optional.of(new TypedConnectorInputEntityData([ + def expectedTypedEntityData = new TypedConnectorInputEntityData([ "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", "id" : "test_lineCtoD", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", @@ -248,7 +255,7 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { rgtd.nodeC, rgtd.nodeD, rgtd.lineTypeInputCtoD - )) + ) def availableTypes = [rgtd.lineTypeInputCtoD] @@ -256,7 +263,8 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { def actual = source.findAndAddType(validConnectorEntityData, availableTypes) then: "everything is fine" - actual == expectedTypedEntityData + actual.success + actual.data.get() == expectedTypedEntityData } def "The CsvRawGridSource is able to identify ConnectorInputEntityData data with non matching type requirements correctly"() { @@ -284,47 +292,46 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { def actual = source.findAndAddType(validConnectorEntityData, availableTypes) then: "everything is fine" - !actual.present + actual.failure } def "The CsvRawGridSource is able to convert a stream of valid ConnectorInputEntityData to TypedConnectorInputEntityData"() { given: "valid input data" - def validStream = Stream.of( - Optional.of(new ConnectorInputEntityData([ - "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "id" : "test_lineCtoD", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", - "operatesFrom" : "2020-03-24 15:11:31", - "operatesUntil" : "2020-03-24 15:11:31", - "parallelDevices" : "2", - "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", - "length" : "0.003", - "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", - "olmCharacteristic" : "olm:{(0.0,1.0)}" - ], - LineInput, - rgtd.nodeC, - rgtd.nodeD - )), - Optional.of(new ConnectorInputEntityData([ - "uuid" : 
"92ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "id" : "test_line_AtoB", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", - "operatesFrom" : "2020-03-24 15:11:31", - "operatesUntil" : "2020-03-24 15:11:31", - "parallelDevices" : "2", - "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", - "length" : "0.003", - "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", - "olmCharacteristic" : "olm:{(0.0,1.0)}" - ], LineInput, - rgtd.nodeA, - rgtd.nodeB - )) - ) + def validStream = Stream.of(new Try.Success<>( + new ConnectorInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD + )), + new Try.Success<>(new ConnectorInputEntityData([ + "uuid" : "92ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_line_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], LineInput, + rgtd.nodeA, + rgtd.nodeB + ))) as Stream> def expectedSet = [ - Optional.of(new TypedConnectorInputEntityData<>([ + new TypedConnectorInputEntityData<>([ "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", "id" : "test_lineCtoD", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", @@ -339,8 +346,8 @@ class CsvRawGridSourceTest extends 
Specification implements CsvTestDataMeta { rgtd.nodeC, rgtd.nodeD, rgtd.lineTypeInputCtoD - )), - Optional.of(new TypedConnectorInputEntityData<>([ + ), + new TypedConnectorInputEntityData<>([ "uuid" : "92ec3bcf-1777-4d38-af67-0bf7c9fa73c7", "id" : "test_line_AtoB", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", @@ -354,7 +361,7 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { rgtd.nodeA, rgtd.nodeB, rgtd.lineTypeInputCtoD - )) + ) ] def availableTypes = [rgtd.lineTypeInputCtoD] @@ -364,7 +371,12 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { then: "everything is fine" actualSet.size() == expectedSet.size() - actualSet.containsAll(expectedSet) + actualSet.every { + it.success + } + actualSet.stream().map { + it.data.get() + }.toList().containsAll(expectedSet) } def "The CsvRawGridSource is able to add the third node for a three winding transformer correctly"() { @@ -385,7 +397,7 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { rgtd.nodeB, rgtd.transformerTypeAtoBtoC) - def expected = Optional.of(new Transformer3WInputEntityData([ + def expected = new Transformer3WInputEntityData([ "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", "id" : "3w_test", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", @@ -399,7 +411,7 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { rgtd.nodeA, rgtd.nodeB, rgtd.nodeC, - rgtd.transformerTypeAtoBtoC)) + rgtd.transformerTypeAtoBtoC) def availableNodes = [ rgtd.nodeA, @@ -411,7 +423,8 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { def actual = source.addThirdNode(typedEntityData, availableNodes) then: "everything is fine" - actual == expected + actual.success + actual.data.get() == expected } def "The CsvRawGridSource is NOT able to add the third node for a three winding transformer, if it is not available"() { @@ -442,12 +455,12 @@ class CsvRawGridSourceTest extends 
Specification implements CsvTestDataMeta { def actual = source.addThirdNode(typedEntityData, availableNodes) then: "everything is fine" - !actual.present + actual.failure } def "The CsvRawGridSource is able to add the third node for a three winding transformer to a stream of candidates"() { given: "suitable input data" - def inputStream = Stream.of(Optional.of(new TypedConnectorInputEntityData([ + def inputStream = Stream.of(Try.of(() -> new TypedConnectorInputEntityData([ "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", "id" : "3w_test", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", @@ -461,8 +474,8 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { Transformer3WInput, rgtd.nodeA, rgtd.nodeB, - rgtd.transformerTypeAtoBtoC)), - Optional.of(new TypedConnectorInputEntityData([ + rgtd.transformerTypeAtoBtoC), SourceException), + Try.of(() -> new TypedConnectorInputEntityData([ "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", "id" : "3w_test", "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", @@ -476,8 +489,7 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { Transformer3WInput, rgtd.nodeA, rgtd.nodeB, - rgtd.transformerTypeAtoBtoC)) - ) + rgtd.transformerTypeAtoBtoC), SourceException)) def availableNodes = [ rgtd.nodeA, @@ -485,31 +497,35 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { rgtd.nodeC ] - def expectedSet = [ - Optional.of(new Transformer3WInputEntityData([ - "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", - "id" : "3w_test", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", - "operatesFrom" : "2020-03-24 15:11:31", - "operatesUntil" : "2020-03-24 15:11:31", - "parallelDevices" : "1", - "tapPos" : "0", - "autoTap" : "true" - ], - Transformer3WInput, - rgtd.nodeA, - rgtd.nodeB, - rgtd.nodeC, - rgtd.transformerTypeAtoBtoC)), - Optional.empty() - ] + def expected = new Transformer3WInputEntityData([ + "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", + 
"id" : "3w_test", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "1", + "tapPos" : "0", + "autoTap" : "true" + ], + Transformer3WInput, + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC, + rgtd.transformerTypeAtoBtoC) when: "the sources tries to add nodes" def actualSet = source.buildTransformer3WEntityData(inputStream, availableNodes).collect(Collectors.toSet()) + def successes = actualSet.stream().filter { + it.success + }.toList() + def failures = actualSet.stream().filter { + it.failure + }.toList() then: "everything is fine" - actualSet.size() == expectedSet.size() - actualSet.containsAll(expectedSet) + actualSet.size() == 2 + successes.get(0).data.get() == expected + failures.get(0).exception.get().class == SourceException } def "The CsvRawGridSource is able to load all nodes from file"() { @@ -691,38 +707,38 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { when: "loading a total grid structure from file" def actual = source.getGridData() def expected = new RawGridElements( - [ - rgtd.nodeA, - rgtd.nodeB, - rgtd.nodeC, - rgtd.nodeD, - rgtd.nodeE, - rgtd.nodeF, - rgtd.nodeG - ] as Set, - [ - rgtd.lineAtoB, - rgtd.lineCtoD - ] as Set, - [ - GridTestData.transformerBtoD, - GridTestData.transformerBtoE, - GridTestData.transformerCtoE, - GridTestData.transformerCtoF, - GridTestData.transformerCtoG - ] as Set, - [ - GridTestData.transformerAtoBtoC - ] as Set, - [rgtd.switchAtoB] as Set, - [ - rgtd.measurementUnitInput - ] as Set - ) + [ + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC, + rgtd.nodeD, + rgtd.nodeE, + rgtd.nodeF, + rgtd.nodeG + ] as Set, + [ + rgtd.lineAtoB, + rgtd.lineCtoD + ] as Set, + [ + GridTestData.transformerBtoD, + GridTestData.transformerBtoE, + GridTestData.transformerCtoE, + GridTestData.transformerCtoF, + GridTestData.transformerCtoG + ] as Set, + [ + GridTestData.transformerAtoBtoC + ] as Set, + 
[rgtd.switchAtoB] as Set, + [ + rgtd.measurementUnitInput + ] as Set + ) then: "all elements are there" - actual.present - actual.get().with { + actual != null + actual.with { /* It's okay, to only test the uuids, because content is tested with the other test mehtods */ assert nodes.size() == expected.nodes.size() assert nodes.each {entry -> expected.nodes.contains({it.uuid == entry.uuid})} @@ -739,27 +755,29 @@ class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { } } - def "The CsvRawGridSource returns an empty Optional, if one mandatory element for the RawGridElements is missing"() { + def "The CsvRawGridSource throws a rawInputDataException, if one mandatory element for the RawGridElements is missing"() { given: "a source pointing to malformed grid data" - CsvTypeSource typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) - source = new CsvRawGridSource(csvSep, gridMalformedFolderPath, fileNamingStrategy, typeSource) + TypeSource typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, fileNamingStrategy)) + source = new RawGridSource(typeSource, new CsvDataSource(csvSep, gridMalformedFolderPath, fileNamingStrategy)) when: "loading a total grid structure from file" def actual = source.getGridData() then: "the optional is empty" - !actual.present + actual == null + SourceException ex = thrown() + ex.message == "edu.ie3.datamodel.exceptions.FailureException: 1 exception(s) occurred within \"NodeInput\" data, one is: edu.ie3.datamodel.exceptions.FactoryException: An error occurred when creating instance of NodeInput.class." 
} - def "The CsvRawGridSource returns an empty Optional, if the RawGridElements contain no single element"() { + def "The CsvRawGridSource returns an empty grid, if the RawGridElements contain no single element"() { given: "a source pointing to malformed grid data" - CsvTypeSource typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) - source = new CsvRawGridSource(csvSep, gridEmptyFolderPath, fileNamingStrategy, typeSource) + TypeSource typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, fileNamingStrategy)) + source = new RawGridSource(typeSource, new CsvDataSource(csvSep, gridEmptyFolderPath, fileNamingStrategy)) when: "loading a total grid structure from file" def actual = source.getGridData() then: "the optional is empty" - !actual.present + actual.allEntitiesAsList().empty } } \ No newline at end of file diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvResultEntitySourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvResultEntitySourceTest.groovy index ebb67c345..65692269d 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvResultEntitySourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvResultEntitySourceTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.source.csv +import edu.ie3.datamodel.io.source.ResultEntitySource import spock.lang.Specification import edu.ie3.test.common.ResultEntityTestData as retd @@ -13,7 +14,7 @@ class CsvResultEntitySourceTest extends Specification implements CsvTestDataMeta def "A CsvResultEntitySource should read a csv and extract entities correctly"() { given: - def csvResultEntitySource = new CsvResultEntitySource(csvSep, resultEntitiesFolderPath, fileNamingStrategy) + def csvResultEntitySource = new ResultEntitySource(new CsvDataSource(csvSep, resultEntitiesFolderPath, fileNamingStrategy)) when: // existent diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSourceTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSourceTest.groovy index ace6ae25b..43b2764a2 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSourceTest.groovy @@ -5,11 +5,16 @@ */ package edu.ie3.datamodel.io.source.csv +import edu.ie3.datamodel.exceptions.SourceException +import edu.ie3.datamodel.exceptions.SystemParticipantsException import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData import edu.ie3.datamodel.io.factory.input.participant.ChpInputEntityData import edu.ie3.datamodel.io.factory.input.participant.HpInputEntityData import edu.ie3.datamodel.io.factory.input.participant.SystemParticipantTypedEntityData import edu.ie3.datamodel.io.source.RawGridSource +import edu.ie3.datamodel.io.source.SystemParticipantSource +import edu.ie3.datamodel.io.source.ThermalSource +import edu.ie3.datamodel.io.source.TypeSource import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.system.BmInput @@ -24,6 +29,7 @@ import edu.ie3.datamodel.models.input.system.StorageInput import edu.ie3.datamodel.models.input.system.WecInput import edu.ie3.datamodel.models.input.thermal.ThermalBusInput import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.SystemParticipantTestData as sptd import spock.lang.Specification @@ -31,65 +37,74 @@ class CsvSystemParticipantSourceTest extends Specification implements CsvTestDat def "A CsvSystemParticipantSource should provide an instance of SystemParticipants based on valid input data correctly"() { given: - def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) - def thermalSource = new CsvThermalSource(csvSep, participantsFolderPath, fileNamingStrategy, typeSource) - def rawGridSource = new 
CsvRawGridSource(csvSep, gridDefaultFolderPath, fileNamingStrategy, typeSource) - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, - participantsFolderPath, fileNamingStrategy, typeSource, - thermalSource, rawGridSource) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, fileNamingStrategy)) + def thermalSource = new ThermalSource(typeSource, new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) + def rawGridSource = new RawGridSource(typeSource, new CsvDataSource(csvSep, gridDefaultFolderPath, fileNamingStrategy)) + def csvSystemParticipantSource = new SystemParticipantSource( + typeSource, + thermalSource, + rawGridSource, + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) when: - def systemParticipantsOpt = csvSystemParticipantSource.getSystemParticipants() + def systemParticipants = csvSystemParticipantSource.systemParticipants then: - systemParticipantsOpt.present - systemParticipantsOpt.ifPresent({ systemParticipants -> - assert (systemParticipants.allEntitiesAsList().size() == 11) - assert (systemParticipants.getPvPlants().first().uuid == sptd.pvInput.uuid) - assert (systemParticipants.getBmPlants().first().uuid == sptd.bmInput.uuid) - assert (systemParticipants.getChpPlants().first().uuid == sptd.chpInput.uuid) - assert (systemParticipants.getEvs().first().uuid == sptd.evInput.uuid) - assert (systemParticipants.getFixedFeedIns().first().uuid == sptd.fixedFeedInInput.uuid) - assert (systemParticipants.getHeatPumps().first().uuid == sptd.hpInput.uuid) - assert (systemParticipants.getLoads().first().uuid == sptd.loadInput.uuid) - assert (systemParticipants.getWecPlants().first().uuid == sptd.wecInput.uuid) - assert (systemParticipants.getStorages().first().uuid == sptd.storageInput.uuid) - assert (systemParticipants.getEvCS().first().uuid == sptd.evcsInput.uuid) - assert (systemParticipants.getEmSystems().first().uuid == sptd.emInput.uuid) - }) + 
systemParticipants.allEntitiesAsList().size() == 11 + systemParticipants.pvPlants.first().uuid == sptd.pvInput.uuid + systemParticipants.bmPlants.first().uuid == sptd.bmInput.uuid + systemParticipants.chpPlants.first().uuid == sptd.chpInput.uuid + systemParticipants.evs.first().uuid == sptd.evInput.uuid + systemParticipants.fixedFeedIns.first().uuid == sptd.fixedFeedInInput.uuid + systemParticipants.heatPumps.first().uuid == sptd.hpInput.uuid + systemParticipants.loads.first().uuid == sptd.loadInput.uuid + systemParticipants.wecPlants.first().uuid == sptd.wecInput.uuid + systemParticipants.storages.first().uuid == sptd.storageInput.uuid + systemParticipants.evCS.first().uuid == sptd.evcsInput.uuid + systemParticipants.emSystems.first().uuid == sptd.emInput.uuid } def "A CsvSystemParticipantSource should process invalid input data as expected when requested to provide an instance of SystemParticipants"() { given: - def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) - def thermalSource = new CsvThermalSource(csvSep, participantsFolderPath, fileNamingStrategy, typeSource) - def rawGridSource = Spy(CsvRawGridSource, constructorArgs: [ - csvSep, - gridDefaultFolderPath, - fileNamingStrategy, - typeSource + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, fileNamingStrategy)) + def thermalSource = new ThermalSource(typeSource, new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) + def rawGridSource = Spy(RawGridSource, constructorArgs: [ + typeSource, + new CsvDataSource(csvSep, gridDefaultFolderPath, fileNamingStrategy) ]) { // partly fake the return method of the csv raw grid source to always return empty node sets // -> elements to build NodeGraphicInputs are missing getNodes() >> new HashSet() getNodes(_) >> new HashSet() } as RawGridSource - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, - participantsFolderPath, fileNamingStrategy, typeSource, - thermalSource, 
rawGridSource) + def csvSystemParticipantSource = new SystemParticipantSource( + typeSource, + thermalSource, + rawGridSource, + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) when: - def systemParticipantsOpt = csvSystemParticipantSource.getSystemParticipants() + def systemParticipants = Try.of(() -> csvSystemParticipantSource.systemParticipants, SystemParticipantsException) then: - !systemParticipantsOpt.present + systemParticipants.failure + systemParticipants.data == Optional.empty() + + Exception ex = systemParticipants.exception.get() + ex.class == SystemParticipantsException + ex.message.startsWith("11 error(s) occurred while initializing system participants. " + + "edu.ie3.datamodel.exceptions.FailureException: 1 exception(s) occurred within \"FixedFeedInInput\" data, one is: " + + "edu.ie3.datamodel.exceptions.FactoryException: edu.ie3.datamodel.exceptions.SourceException: " + + "Failure due to: Skipping FixedFeedInInput with uuid ") } def "A CsvSystemParticipantSource should build typed entity from valid and invalid input data as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, - participantsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), - Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) def nodeAssetInputEntityData = new NodeAssetInputEntityData(fieldsToAttributes, clazz, operator, node) @@ -97,25 +112,27 @@ class CsvSystemParticipantSourceTest extends Specification implements CsvTestDat def typedEntityDataOpt = csvSystemParticipantSource.buildTypedEntityData(nodeAssetInputEntityData, types) then: - typedEntityDataOpt.present == resultIsPresent - typedEntityDataOpt.ifPresent({ typedEntityData -> + typedEntityDataOpt.success == resultIsPresent + typedEntityDataOpt.data.ifPresent({ 
typedEntityData -> assert (typedEntityData == resultData) }) where: types | node | operator | fieldsToAttributes | clazz || resultIsPresent || resultData []| sptd.chpInput.node | sptd.chpInput.operator | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8"] | ChpInput || false || null - [sptd.chpTypeInput]| sptd.chpInput.node | sptd.chpInput.operator | ["bla": "foo"] | ChpInput || false || null - [sptd.chpTypeInput]| sptd.chpInput.node | sptd.chpInput.operator | [:] | ChpInput || false || null - [sptd.chpTypeInput]| sptd.chpInput.node | sptd.chpInput.operator | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb9"] | ChpInput || false || null - [sptd.chpTypeInput]| sptd.chpInput.node | sptd.chpInput.operator | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8"] | ChpInput || true || new SystemParticipantTypedEntityData<>([:], clazz, operator, node, sptd.chpTypeInput) + [sptd.chpTypeInput] | sptd.chpInput.node | sptd.chpInput.operator | ["bla": "foo"] | ChpInput || false || null + [sptd.chpTypeInput] | sptd.chpInput.node | sptd.chpInput.operator | [:] | ChpInput || false || null + [sptd.chpTypeInput] | sptd.chpInput.node | sptd.chpInput.operator | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb9"] | ChpInput || false || null + [sptd.chpTypeInput] | sptd.chpInput.node | sptd.chpInput.operator | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8"] | ChpInput || true || new SystemParticipantTypedEntityData<>([:], clazz, operator, node, sptd.chpTypeInput) } def "A CsvSystemParticipantSource should build hp input entity from valid and invalid input data as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, - participantsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), - Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) def sysPartTypedEntityData 
= new SystemParticipantTypedEntityData<>(fieldsToAttributes, HpInput, sptd.hpInput.operator, sptd.hpInput.node, sptd.hpTypeInput) @@ -123,25 +140,27 @@ class CsvSystemParticipantSourceTest extends Specification implements CsvTestDat def hpInputEntityDataOpt = csvSystemParticipantSource.buildHpEntityData(sysPartTypedEntityData, thermalBuses) then: - hpInputEntityDataOpt.present == resultIsPresent - hpInputEntityDataOpt.ifPresent({ hpInputEntityData -> + hpInputEntityDataOpt.success == resultIsPresent + hpInputEntityDataOpt.data.ifPresent({ hpInputEntityData -> assert (hpInputEntityData == resultData) }) where: thermalBuses | fieldsToAttributes || resultIsPresent || resultData - []| ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384e"] || false || null - [sptd.hpInput.thermalBus]| ["bla": "foo"] || false || null - [sptd.hpInput.thermalBus]| [:] || false || null - [sptd.hpInput.thermalBus]| ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384f"] || false || null - [sptd.hpInput.thermalBus]| ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384e"] || true || new HpInputEntityData([:], sptd.hpInput.operator, sptd.hpInput.node, sptd.hpTypeInput, sptd.hpInput.thermalBus) + [] | ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384e"] || false || null + [sptd.hpInput.thermalBus] | ["bla": "foo"] || false || null + [sptd.hpInput.thermalBus] | [:] || false || null + [sptd.hpInput.thermalBus] | ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384f"] || false || null + [sptd.hpInput.thermalBus] | ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384e"] || true || new HpInputEntityData([:], sptd.hpInput.operator, sptd.hpInput.node, sptd.hpTypeInput, sptd.hpInput.thermalBus) } def "A CsvSystemParticipantSource should build chp input entity from valid and invalid input data as expected"(List thermalStorages, List thermalBuses, Map fieldsToAttributes, boolean resultIsPresent, ChpInputEntityData resultData) { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, 
- participantsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), - Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) def sysPartTypedEntityData = new SystemParticipantTypedEntityData<>(fieldsToAttributes, ChpInput, sptd.chpInput.operator, sptd.chpInput.node, sptd.chpTypeInput) @@ -149,8 +168,8 @@ class CsvSystemParticipantSourceTest extends Specification implements CsvTestDat def hpInputEntityDataOpt = csvSystemParticipantSource.buildChpEntityData(sysPartTypedEntityData, thermalStorages, thermalBuses) then: - hpInputEntityDataOpt.present == resultIsPresent - hpInputEntityDataOpt.ifPresent({ hpInputEntityData -> + hpInputEntityDataOpt.success == resultIsPresent + hpInputEntityDataOpt.data.ifPresent({ hpInputEntityData -> assert (hpInputEntityData == resultData) }) @@ -170,35 +189,51 @@ class CsvSystemParticipantSourceTest extends Specification implements CsvTestDat def "A CsvSystemParticipantSource should return data from a valid heat pump input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def heatPumps = csvSystemParticipantSource.getHeatPumps(nodes as Set, operators as Set, types as Set, thermalBuses as Set) - heatPumps.size() == resultingSize - heatPumps == resultingSet as Set + def heatPumps = Try.of(() -> csvSystemParticipantSource.getHeatPumps(nodes as Set, operators as Set, types as Set, thermalBuses as Set), SourceException) + + if (heatPumps.success) { + 
heatPumps.data.get().size() == resultingSize + heatPumps.data.get() == resultingSet as Set + } else { + heatPumps.exception.get().class == SourceException + } where: nodes | operators | types | thermalBuses || resultingSize || resultingSet - [sptd.hpInput.node]| [sptd.hpInput.operator]| [sptd.hpInput.type]| [sptd.hpInput.thermalBus]|| 1 || [sptd.hpInput] - [sptd.hpInput.node]| []| [sptd.hpInput.type]| [sptd.hpInput.thermalBus]|| 1 || [ + [sptd.hpInput.node] | [sptd.hpInput.operator] | [sptd.hpInput.type] | [sptd.hpInput.thermalBus] || 1 || [sptd.hpInput] + [sptd.hpInput.node] | [] | [sptd.hpInput.type] | [sptd.hpInput.thermalBus] || 1 || [ new HpInput(sptd.hpInput.uuid, sptd.hpInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.hpInput.operationTime, sptd.hpInput.node, sptd.hpInput.thermalBus, sptd.hpInput.qCharacteristics, sptd.hpInput.type) ] - []| []| []| []|| 0 || [] - [sptd.hpInput.node]| []| []| []|| 0 || [] - [sptd.hpInput.node]| [sptd.hpInput.operator]| []| []|| 0 || [] - [sptd.hpInput.node]| [sptd.hpInput.operator]| [sptd.hpInput.type]| []|| 0 || [] + [] | [] | [] | [] || 0 || [] + [sptd.hpInput.node] | [] | [] | [] || 0 || [] + [sptd.hpInput.node] | [sptd.hpInput.operator] | [] | [] || 0 || [] + [sptd.hpInput.node] | [sptd.hpInput.operator] | [sptd.hpInput.type] | [] || 0 || [] } def "A CsvSystemParticipantSource should return data from a valid chp input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def chpUnits = csvSystemParticipantSource.getChpPlants(nodes as Set, operators as Set, types as Set, thermalBuses as Set, thermalStorages as Set) - chpUnits.size() == resultingSize - 
chpUnits == resultingSet as Set + def chpUnits = Try.of(() -> csvSystemParticipantSource.getChpPlants(nodes as Set, operators as Set, types as Set, thermalBuses as Set, thermalStorages as Set), SourceException) + + if (chpUnits.success) { + chpUnits.data.get().size() == resultingSize + chpUnits.data.get() == resultingSet as Set + } else { + chpUnits.exception.get().class == SourceException + } where: nodes | operators | types | thermalBuses | thermalStorages || resultingSize || resultingSet @@ -210,105 +245,145 @@ class CsvSystemParticipantSourceTest extends Specification implements CsvTestDat ] as List || 1 || [ new ChpInput(sptd.chpInput.uuid, sptd.chpInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.chpInput.operationTime, sptd.chpInput.node, sptd.chpInput.thermalBus, sptd.chpInput.qCharacteristics, sptd.chpInput.type, sptd.chpInput.thermalStorage, sptd.chpInput.marketReaction) ] - []| []| []| []| [] as List || 0 || [] - [sptd.chpInput.node]| []| []| []| [] as List || 0 || [] - [sptd.chpInput.node]| [sptd.chpInput.operator]| []| []| [] as List || 0 || [] - [sptd.chpInput.node]| [sptd.chpInput.operator]| [sptd.chpInput.type]| []| [] as List || 0 || [] + [] | [] | [] | [] | [] as List || 0 || [] + [sptd.chpInput.node] | [] | [] | [] | [] as List || 0 || [] + [sptd.chpInput.node] | [sptd.chpInput.operator] | [] | [] | [] as List || 0 || [] + [sptd.chpInput.node] | [sptd.chpInput.operator] | [sptd.chpInput.type] | [] | [] as List || 0 || [] } def "A CsvSystemParticipantSource should return data from valid ev input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = 
csvSystemParticipantSource.getEvs(nodes as Set, operators as Set, types as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> csvSystemParticipantSource.getEvs(nodes as Set, operators as Set, types as Set), SourceException) + + if (sysParts.success) { + sysParts.data.get().size() == resultingSize + sysParts.data.get() == resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators | types || resultingSize || resultingSet - [sptd.evInput.node]| [sptd.evInput.operator]| [sptd.evInput.type]|| 1 || [sptd.evInput] - [sptd.evInput.node]| []| [sptd.evInput.type]|| 1 || [ + [sptd.evInput.node] | [sptd.evInput.operator] | [sptd.evInput.type] || 1 || [sptd.evInput] + [sptd.evInput.node] | [] | [sptd.evInput.type] || 1 || [ new EvInput(sptd.evInput.uuid, sptd.evInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.evInput.operationTime, sptd.evInput.node, sptd.evInput.qCharacteristics, sptd.evInput.type) ] - [sptd.evInput.node]| [sptd.evInput.operator]| []|| 0 || [] - [sptd.evInput.node]| []| []|| 0 || [] - []| []| []|| 0 || [] + [sptd.evInput.node] | [sptd.evInput.operator] | [] || 0 || [] + [sptd.evInput.node] | [] | [] || 0 || [] + [] | [] | [] || 0 || [] } def "A CsvSystemParticipantSource should return data from valid wec input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = csvSystemParticipantSource.getWecPlants(nodes as Set, operators as Set, types as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> 
csvSystemParticipantSource.getWecPlants(nodes as Set, operators as Set, types as Set), SourceException) + + if (sysParts.success) { + sysParts.data.get().size() == resultingSize + sysParts.data.get() == resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators | types || resultingSize || resultingSet - [sptd.wecInput.node]| [sptd.wecInput.operator]| [sptd.wecInput.type]|| 1 || [sptd.wecInput] - [sptd.wecInput.node]| []| [sptd.wecInput.type]|| 1 || [ + [sptd.wecInput.node] | [sptd.wecInput.operator] | [sptd.wecInput.type] || 1 || [sptd.wecInput] + [sptd.wecInput.node] | [] | [sptd.wecInput.type] || 1 || [ new WecInput(sptd.wecInput.uuid, sptd.wecInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.wecInput.operationTime, sptd.wecInput.node, sptd.wecInput.qCharacteristics, sptd.wecInput.type, sptd.wecInput.marketReaction) ] - [sptd.wecInput.node]| [sptd.wecInput.operator]| []|| 0 || [] - [sptd.wecInput.node]| []| []|| 0 || [] - []| []| []|| 0 || [] + [sptd.wecInput.node] | [sptd.wecInput.operator] | [] || 0 || [] + [sptd.wecInput.node] | [] | [] || 0 || [] + [] | [] | [] || 0 || [] } def "A CsvSystemParticipantSource should return data from valid storage input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = csvSystemParticipantSource.getStorages(nodes as Set, operators as Set, types as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> csvSystemParticipantSource.getStorages(nodes as Set, operators as Set, types as Set), SourceException) + + if (sysParts.success) { + 
sysParts.data.get().size() == resultingSize + sysParts.data.get() == resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators | types || resultingSize || resultingSet - [sptd.storageInput.node]| [sptd.storageInput.operator]| [sptd.storageInput.type]|| 1 || [sptd.storageInput] - [sptd.storageInput.node]| []| [sptd.storageInput.type]|| 1 || [ + [sptd.storageInput.node] | [sptd.storageInput.operator] | [sptd.storageInput.type] || 1 || [sptd.storageInput] + [sptd.storageInput.node] | [] | [sptd.storageInput.type] || 1 || [ new StorageInput(sptd.storageInput.uuid, sptd.storageInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.storageInput.operationTime, sptd.storageInput.node, sptd.storageInput.qCharacteristics, sptd.storageInput.type) ] - [sptd.storageInput.node]| [sptd.storageInput.operator]| []|| 0 || [] - [sptd.storageInput.node]| []| []|| 0 || [] - []| []| []|| 0 || [] + [sptd.storageInput.node] | [sptd.storageInput.operator] | [] || 0 || [] + [sptd.storageInput.node] | [] | [] || 0 || [] + [] | [] | [] || 0 || [] } def "A CsvSystemParticipantSource should return data from valid bm input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = csvSystemParticipantSource.getBmPlants(nodes as Set, operators as Set, types as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> csvSystemParticipantSource.getBmPlants(nodes as Set, operators as Set, types as Set), SourceException) + + if (sysParts.success) { + sysParts.data.get().size() == resultingSize + sysParts.data.get() == 
resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators | types || resultingSize || resultingSet - [sptd.bmInput.node]| [sptd.bmInput.operator]| [sptd.bmInput.type]|| 1 || [sptd.bmInput] - [sptd.bmInput.node]| []| [sptd.bmInput.type]|| 1 || [ + [sptd.bmInput.node] | [sptd.bmInput.operator] | [sptd.bmInput.type] || 1 || [sptd.bmInput] + [sptd.bmInput.node] | [] | [sptd.bmInput.type] || 1 || [ new BmInput(sptd.bmInput.uuid, sptd.bmInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.bmInput.operationTime, sptd.bmInput.node, sptd.bmInput.qCharacteristics, sptd.bmInput.type, sptd.bmInput.marketReaction, sptd.bmInput.costControlled, sptd.bmInput.feedInTariff) ] - [sptd.bmInput.node]| [sptd.bmInput.operator]| []|| 0 || [] - [sptd.bmInput.node]| []| []|| 0 || [] - []| []| []|| 0 || [] + [sptd.bmInput.node] | [sptd.bmInput.operator] | [] || 0 || [] + [sptd.bmInput.node] | [] | [] || 0 || [] + [] | [] | [] || 0 || [] } def "A CsvSystemParticipantSource should return data from valid ev charging station input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = csvSystemParticipantSource.getEvCS(nodes as Set, operators as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> csvSystemParticipantSource.getEvCS(nodes as Set, operators as Set), SourceException) + + if (sysParts.success) { + sysParts.data.get().size() == resultingSize + sysParts.data.get() == resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators || resultingSize || 
resultingSet @@ -322,77 +397,109 @@ class CsvSystemParticipantSourceTest extends Specification implements CsvTestDat def "A CsvSystemParticipantSource should return data from valid load input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = csvSystemParticipantSource.getLoads(nodes as Set, operators as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> csvSystemParticipantSource.getLoads(nodes as Set, operators as Set), SourceException) + + if (sysParts.success) { + sysParts.data.get().size() == resultingSize + sysParts.data.get() == resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators || resultingSize || resultingSet - [sptd.loadInput.node]| [sptd.loadInput.operator]|| 1 || [sptd.loadInput] - [sptd.loadInput.node]| []|| 1 || [ + [sptd.loadInput.node] | [sptd.loadInput.operator] || 1 || [sptd.loadInput] + [sptd.loadInput.node] | [] || 1 || [ new LoadInput(sptd.loadInput.uuid, sptd.loadInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.loadInput.operationTime, sptd.loadInput.node, sptd.loadInput.qCharacteristics, sptd.loadInput.loadProfile, sptd.loadInput.dsm, sptd.loadInput.eConsAnnual, sptd.loadInput.sRated, sptd.loadInput.cosPhiRated) ] - []| [sptd.loadInput.operator]|| 0 || [] - []| []|| 0 || [] + [] | [sptd.loadInput.operator] || 0 || [] + [] | [] || 0 || [] } def "A CsvSystemParticipantSource should return data from valid pv input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - 
fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = csvSystemParticipantSource.getPvPlants(nodes as Set, operators as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> csvSystemParticipantSource.getPvPlants(nodes as Set, operators as Set), SourceException) + + if (sysParts.success) { + sysParts.data.get().size() == resultingSize + sysParts.data.get() == resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators || resultingSize || resultingSet - [sptd.pvInput.node]| [sptd.pvInput.operator]|| 1 || [sptd.pvInput] - [sptd.pvInput.node]| []|| 1 || [ + [sptd.pvInput.node] | [sptd.pvInput.operator] || 1 || [sptd.pvInput] + [sptd.pvInput.node] | [] || 1 || [ new PvInput(sptd.pvInput.uuid, sptd.pvInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.pvInput.operationTime, sptd.pvInput.node, sptd.pvInput.qCharacteristics, sptd.pvInput.albedo, sptd.pvInput.azimuth, sptd.pvInput.etaConv, sptd.pvInput.elevationAngle, sptd.pvInput.kG, sptd.pvInput.kT, sptd.pvInput.marketReaction, sptd.pvInput.sRated, sptd.pvInput.cosPhiRated) ] - []| [sptd.pvInput.operator]|| 0 || [] - []| []|| 0 || [] + [] | [sptd.pvInput.operator] || 0 || [] + [] | [] || 0 || [] } def "A CsvSystemParticipantSource should return data from valid fixedFeedIn input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, 
participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = csvSystemParticipantSource.getFixedFeedIns(nodes as Set, operators as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> csvSystemParticipantSource.getFixedFeedIns(nodes as Set, operators as Set), SourceException) + + if (sysParts.success) { + sysParts.data.get().size() == resultingSize + sysParts.data.get() == resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators || resultingSize || resultingSet - [sptd.fixedFeedInInput.node]| [ + [sptd.fixedFeedInInput.node] | [ sptd.fixedFeedInInput.operator ] as List || 1 || [sptd.fixedFeedInInput] - [sptd.fixedFeedInInput.node]| [] as List || 1 || [ + [sptd.fixedFeedInInput.node] | [] as List || 1 || [ new FixedFeedInInput(sptd.fixedFeedInInput.uuid, sptd.fixedFeedInInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.fixedFeedInInput.operationTime, sptd.fixedFeedInInput.node, sptd.fixedFeedInInput.qCharacteristics, sptd.fixedFeedInInput.sRated, sptd.fixedFeedInInput.cosPhiRated) ] - []| [ + [] | [ sptd.fixedFeedInInput.operator ] as List || 0 || [] - []| [] as List || 0 || [] + [] | [] as List || 0 || [] } def "A CsvSystemParticipantSource should return data from valid em input file as expected"() { given: - def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, - fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + def csvSystemParticipantSource = new SystemParticipantSource( + Mock(TypeSource), + Mock(ThermalSource), + Mock(RawGridSource), + new CsvDataSource(csvSep, participantsFolderPath, fileNamingStrategy)) expect: - def sysParts = csvSystemParticipantSource.getEmSystems(nodes as Set, operators as Set) - sysParts.size() == resultingSize - sysParts == resultingSet as Set + def sysParts = Try.of(() -> csvSystemParticipantSource.getEmSystems(nodes as Set, operators as 
Set), SourceException) + + if (sysParts.success) { + sysParts.data.get().size() == resultingSize + sysParts.data.get() == resultingSet as Set + } else { + sysParts.exception.get().class == SourceException + } where: nodes | operators || resultingSize || resultingSet diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTestDataMeta.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTestDataMeta.groovy index 284ca9e6a..fc0b2bf4d 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTestDataMeta.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTestDataMeta.groovy @@ -6,34 +6,51 @@ package edu.ie3.datamodel.io.source.csv import edu.ie3.datamodel.io.naming.FileNamingStrategy +import spock.lang.Shared -import java.nio.file.Paths +import java.nio.file.Path /** * Holds meta data for csv tests e.g. file and folder paths */ trait CsvTestDataMeta { - static String timeSeriesFolderPath = getResourceAbs("_timeseries") - static String graphicsFolderPath = getResourceAbs("_graphics") - static String typeFolderPath = getResourceAbs("_types") - static String participantsFolderPath = getResourceAbs("_participants") - static String resultEntitiesFolderPath = getResourceAbs("_results") - static String thermalFolderPath = getResourceAbs("_thermal") - static String coordinatesIconFolderPath = getResourceAbs("_coordinates/icon") - static String coordinatesCosmoFolderPath = getResourceAbs("_coordinates/cosmo") - static String weatherCosmoFolderPath = getResourceAbs("_weather/cosmo") - static String weatherIconFolderPath = getResourceAbs("_weather/icon") - static String jointGridFolderPath = getResourceAbs("_joint_grid") + @Shared + Path timeSeriesFolderPath = getResourceAbs("_timeseries") + @Shared + Path graphicsFolderPath = getResourceAbs("_graphics") + @Shared + Path typeFolderPath = getResourceAbs("_types") + @Shared + Path participantsFolderPath = getResourceAbs("_participants") + @Shared + Path resultEntitiesFolderPath = 
getResourceAbs("_results") + @Shared + Path thermalFolderPath = getResourceAbs("_thermal") + @Shared + Path coordinatesIconFolderPath = getResourceAbs("_coordinates/icon") + @Shared + Path coordinatesCosmoFolderPath = getResourceAbs("_coordinates/cosmo") + @Shared + Path weatherCosmoFolderPath = getResourceAbs("_weather/cosmo") + @Shared + Path weatherIconFolderPath = getResourceAbs("_weather/icon") + @Shared + Path jointGridFolderPath = getResourceAbs("_joint_grid") - static String gridDefaultFolderPath = getResourceAbs("_grid/default") - static String gridMalformedFolderPath = getResourceAbs("_grid/malformed") - static String gridEmptyFolderPath = getResourceAbs("_grid/empty") + @Shared + Path gridDefaultFolderPath = getResourceAbs("_grid/default") + @Shared + Path gridMalformedFolderPath = getResourceAbs("_grid/malformed") + @Shared + Path gridEmptyFolderPath = getResourceAbs("_grid/empty") - static String csvSep = "," - static FileNamingStrategy fileNamingStrategy = new FileNamingStrategy() + @Shared + String csvSep = "," + @Shared + FileNamingStrategy fileNamingStrategy = new FileNamingStrategy() - static String getResourceAbs(String directory) { - return Paths.get(CsvTestDataMeta.getResource(directory).toURI()).toString() + Path getResourceAbs(String directory) { + return Path.of(CsvTestDataMeta.getResource(directory).toURI()) } } \ No newline at end of file diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvThermalSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvThermalSourceTest.groovy index 4845d1f3b..bead486fc 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvThermalSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvThermalSourceTest.groovy @@ -8,6 +8,8 @@ package edu.ie3.datamodel.io.source.csv import edu.ie3.datamodel.io.naming.FileNamingStrategy import edu.ie3.datamodel.io.factory.input.AssetInputEntityData import edu.ie3.datamodel.io.factory.input.ThermalUnitInputEntityData 
+import edu.ie3.datamodel.io.source.ThermalSource +import edu.ie3.datamodel.io.source.TypeSource import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.thermal.ThermalBusInput import edu.ie3.datamodel.models.input.thermal.ThermalUnitInput @@ -21,8 +23,8 @@ class CsvThermalSourceTest extends Specification implements CsvTestDataMeta { def "A CsvThermalSource should return ThermalBuses from valid and invalid input data as expected"() { given: - def csvTypeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) - def csvThermalSource = new CsvThermalSource(csvSep, thermalFolderPath, fileNamingStrategy, csvTypeSource) + def csvTypeSource = new TypeSource(new CsvDataSource(",", typeFolderPath, new FileNamingStrategy())) + def csvThermalSource = new ThermalSource(csvTypeSource, new CsvDataSource(csvSep, thermalFolderPath, fileNamingStrategy)) def operators = csvTypeSource.operators //test method when no operators are provided as constructor parameters @@ -50,8 +52,8 @@ class CsvThermalSourceTest extends Specification implements CsvTestDataMeta { def "A CsvThermalSource should return a CylindricalStorageInput from valid and invalid input data as expected"() { given: - def csvTypeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) - def csvThermalSource = new CsvThermalSource(csvSep, thermalFolderPath, fileNamingStrategy, csvTypeSource) + def csvTypeSource = new TypeSource(new CsvDataSource(",", typeFolderPath, new FileNamingStrategy())) + def csvThermalSource = new ThermalSource(csvTypeSource, new CsvDataSource(csvSep, thermalFolderPath, fileNamingStrategy)) def operators = csvTypeSource.operators def thermalBuses = csvThermalSource.thermalBuses @@ -88,13 +90,12 @@ class CsvThermalSourceTest extends Specification implements CsvTestDataMeta { resultingCylindricalStorage.first().inletTemp == sptd.inletTemp resultingCylindricalStorage.first().returnTemp == sptd.returnTemp 
resultingCylindricalStorage.first().c == sptd.c - } def "A CsvThermalSource should build thermal unit input entity from valid and invalid input data as expected"() { given: - def csvTypeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) - def csvThermalSource = new CsvThermalSource(csvSep, thermalFolderPath, fileNamingStrategy, csvTypeSource) + def csvTypeSource = new TypeSource(new CsvDataSource(",", typeFolderPath, new FileNamingStrategy())) + def csvThermalSource = new ThermalSource(csvTypeSource, new CsvDataSource(csvSep, thermalFolderPath, fileNamingStrategy)) def operator = new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "testOperator") def validFieldsToAttributes = [ "uuid" : "717af017-cc69-406f-b452-e022d7fb516a", @@ -111,8 +112,8 @@ class CsvThermalSourceTest extends Specification implements CsvTestDataMeta { then: resultingDataOpt.size() == 1 - resultingDataOpt.first().present == resultIsPresent - resultingDataOpt.first().ifPresent({ resultingData -> + resultingDataOpt.first().data.present == resultIsPresent + resultingDataOpt.first().data.ifPresent({ resultingData -> assert (resultingData == expectedThermalUnitInputEntityData) }) @@ -130,13 +131,12 @@ class CsvThermalSourceTest extends Specification implements CsvTestDataMeta { ThermalUnitInput, new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "testOperator"), new ThermalBusInput(UUID.fromString("0d95d7f2-49fb-4d49-8636-383a5220384e"), "test_thermal_bus")) - } def "A CsvThermalSource should return a ThermalHouseInput from valid and invalid input data as expected"() { given: - def csvTypeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) - def csvThermalSource = new CsvThermalSource(csvSep, thermalFolderPath, fileNamingStrategy, csvTypeSource) + def csvTypeSource = new TypeSource(new CsvDataSource(",", typeFolderPath, new FileNamingStrategy())) + def csvThermalSource = new ThermalSource(csvTypeSource, new 
CsvDataSource(csvSep, thermalFolderPath, fileNamingStrategy)) def operators = csvTypeSource.operators def thermalBuses = csvThermalSource.thermalBuses @@ -175,6 +175,5 @@ class CsvThermalSourceTest extends Specification implements CsvTestDataMeta { resultingThermalHouse.first().targetTemperature == ThermalUnitInputTestData.thermalHouseInput.targetTemperature resultingThermalHouse.first().upperTemperatureLimit == ThermalUnitInputTestData.thermalHouseInput.upperTemperatureLimit resultingThermalHouse.first().lowerTemperatureLimit == ThermalUnitInputTestData.thermalHouseInput.lowerTemperatureLimit - } -} +} \ No newline at end of file diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSourceIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSourceIT.groovy index 23f80df9f..2fa70a2f6 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSourceIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSourceIT.groovy @@ -11,6 +11,8 @@ import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme import spock.lang.Shared import spock.lang.Specification +import java.nio.file.Path + class CsvTimeSeriesMetaInformationSourceIT extends Specification implements CsvTestDataMeta { @Shared CsvTimeSeriesMetaInformationSource source @@ -22,13 +24,13 @@ class CsvTimeSeriesMetaInformationSourceIT extends Specification implements CsvT def "A CSV time series meta information source returns correct mapping of time series"() { given: def expectedTimeSeries = Set.of( - new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1"), ColumnScheme.ENERGY_PRICE, 'its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1'), - new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7"), ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND, 'its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7'), - new 
CsvIndividualTimeSeriesMetaInformation(UUID.fromString("c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0"), ColumnScheme.HEAT_DEMAND, 'its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0'), - new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5"), ColumnScheme.ACTIVE_POWER, 'its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5'), - new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26"), ColumnScheme.APPARENT_POWER, 'its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26'), - new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("46be1e57-e4ed-4ef7-95f1-b2b321cb2047"), ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND, 'its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047'), - new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("1061af70-1c03-46e1-b960-940b956c429f"), ColumnScheme.APPARENT_POWER, 'its_pq_1061af70-1c03-46e1-b960-940b956c429f') + new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1"), ColumnScheme.ENERGY_PRICE, Path.of('its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1')), + new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7"), ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND, Path.of('its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7')), + new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0"), ColumnScheme.HEAT_DEMAND, Path.of('its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0')), + new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5"), ColumnScheme.ACTIVE_POWER, Path.of('its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5')), + new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26"), ColumnScheme.APPARENT_POWER, Path.of('its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26')), + new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("46be1e57-e4ed-4ef7-95f1-b2b321cb2047"), ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND, 
Path.of('its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047')), + new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("1061af70-1c03-46e1-b960-940b956c429f"), ColumnScheme.APPARENT_POWER, Path.of('its_pq_1061af70-1c03-46e1-b960-940b956c429f')) ) when: diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceIT.groovy index 5a7002df8..de1857954 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceIT.groovy @@ -5,6 +5,8 @@ */ package edu.ie3.datamodel.io.source.csv +import java.nio.file.Path + import static edu.ie3.test.common.TimeSeriesSourceTestData.* import edu.ie3.datamodel.exceptions.SourceException @@ -25,12 +27,12 @@ class CsvTimeSeriesSourceIT extends Specification implements CsvTestDataMeta { def setup() { factory = new TimeBasedSimpleValueFactory<>(HeatAndPValue) - source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7"), "its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7", HeatAndPValue, factory) + source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7"), Path.of("its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7"), HeatAndPValue, factory) } def "A csv time series source throw an Exception, if the file cannot be found"() { given: - def filePath = "file/not/found.csv" + def filePath = Path.of("file/not/found.csv") when: source.buildIndividualTimeSeries(UUID.fromString("fbc59b5b-9307-4fb4-a406-c1f08f26fee5"), filePath, { null }) @@ -43,11 +45,11 @@ class CsvTimeSeriesSourceIT extends Specification implements CsvTestDataMeta { def "A csv time series source is able to read in a proper file correctly"() { given: - def filePath = "its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7" + def filePath = 
Path.of("its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7") def tsUuid = UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7") when: - def actual = source.buildIndividualTimeSeries(tsUuid, filePath, { source.buildTimeBasedValue(it, HeatAndPValue, factory) }) + def actual = source.buildIndividualTimeSeries(tsUuid, filePath, { source.createTimeBasedValue(it) }) then: noExceptionThrown() @@ -56,7 +58,7 @@ class CsvTimeSeriesSourceIT extends Specification implements CsvTestDataMeta { def "Construction a csv time series source with malicious parameters, leads to IllegalArgumentException"() { when: - new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("fbc59b5b-9307-4fb4-a406-c1f08f26fee5"), "file/not/found", HeatAndPValue, factory) + new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("fbc59b5b-9307-4fb4-a406-c1f08f26fee5"), Path.of("file/not/found"), HeatAndPValue, factory) then: def e = thrown(IllegalArgumentException) diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceTest.groovy index d2ef940e6..5eaaa914e 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceTest.groovy @@ -9,6 +9,8 @@ import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation import edu.ie3.datamodel.io.naming.FileNamingStrategy import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme +import java.nio.file.Path + import static edu.ie3.datamodel.models.StandardUnits.ENERGY_PRICE import edu.ie3.datamodel.exceptions.SourceException @@ -26,7 +28,7 @@ class CsvTimeSeriesSourceTest extends Specification implements CsvTestDataMeta { def "The csv time series source is able to build time based values from simple data"() { given: def factory = new TimeBasedSimpleValueFactory(EnergyPriceValue) - def 
source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1"), "its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1", EnergyPriceValue, factory) + def source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1"), Path.of("its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1"), EnergyPriceValue, factory) def time = TimeUtil.withDefaults.toZonedDateTime("2019-01-01 00:00:00") def timeUtil = new TimeUtil(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd'T'HH:mm:ss[.S[S][S]]'Z'") def fieldToValue = [ @@ -41,16 +43,16 @@ class CsvTimeSeriesSourceTest extends Specification implements CsvTestDataMeta { ) when: - def actual = source.buildTimeBasedValue(fieldToValue, EnergyPriceValue, factory) + def actual = source.createTimeBasedValue(fieldToValue) then: - actual.present - actual.get() == expected + actual.success + actual.data.get() == expected } def "The factory method in csv time series source refuses to build time series with unsupported column type"() { given: - def metaInformation = new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("8bc9120d-fb9b-4484-b4e3-0cdadf0feea9"), ColumnScheme.WEATHER, "its_weather_8bc9120d-fb9b-4484-b4e3-0cdadf0feea9") + def metaInformation = new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("8bc9120d-fb9b-4484-b4e3-0cdadf0feea9"), ColumnScheme.WEATHER, Path.of("its_weather_8bc9120d-fb9b-4484-b4e3-0cdadf0feea9")) when: CsvTimeSeriesSource.getSource(";", timeSeriesFolderPath, fileNamingStrategy, metaInformation) @@ -72,12 +74,12 @@ class CsvTimeSeriesSourceTest extends Specification implements CsvTestDataMeta { actual.timeSeries.entries[0].value.class == valueClass where: - uuid | columnScheme | path || amountOfEntries | valueClass - UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1") | ColumnScheme.ENERGY_PRICE | "its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1" || 2 | EnergyPriceValue - 
UUID.fromString("c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0") | ColumnScheme.HEAT_DEMAND | "its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0" || 2 | HeatDemandValue - UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5") | ColumnScheme.ACTIVE_POWER | "its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5" || 2 | PValue - UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7") | ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND | "its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7" || 2 | HeatAndPValue - UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") | ColumnScheme.APPARENT_POWER | "its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26" || 2 | SValue - UUID.fromString("46be1e57-e4ed-4ef7-95f1-b2b321cb2047") | ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND | "its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047" || 2 | HeatAndSValue + uuid | columnScheme | path || amountOfEntries | valueClass + UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1") | ColumnScheme.ENERGY_PRICE | Path.of("its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1") || 2 | EnergyPriceValue + UUID.fromString("c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0") | ColumnScheme.HEAT_DEMAND | Path.of("its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0") || 2 | HeatDemandValue + UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5") | ColumnScheme.ACTIVE_POWER | Path.of("its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5") || 2 | PValue + UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7") | ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND | Path.of("its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7") || 2 | HeatAndPValue + UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") | ColumnScheme.APPARENT_POWER | Path.of("its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26") || 2 | SValue + UUID.fromString("46be1e57-e4ed-4ef7-95f1-b2b321cb2047") | ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND | Path.of("its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047") || 2 | HeatAndSValue } } \ No newline at end of file diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTypeSourceTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTypeSourceTest.groovy index d5a084201..2aa6331f5 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTypeSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTypeSourceTest.groovy @@ -6,6 +6,7 @@ package edu.ie3.datamodel.io.source.csv import edu.ie3.datamodel.io.naming.FileNamingStrategy +import edu.ie3.datamodel.io.source.TypeSource import edu.ie3.datamodel.models.input.OperatorInput import spock.lang.Specification import edu.ie3.test.common.GridTestData as gtd @@ -16,7 +17,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { def "A CsvTypeSource should read and handle valid 2W Transformer type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def transformer2WTypes = typeSource.transformer2WTypes @@ -45,7 +46,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { UUID.fromString("f15105c4-a2de-4ab8-a621-4bc98e372d92"), "Univ.-Prof. Dr. rer. hort. 
Klaus-Dieter Brokkoli") def secondOperator = new OperatorInput( UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator") - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def operators = typeSource.operators @@ -57,7 +58,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { def "A CsvTypeSource should read and handle valid line type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def lineTypes = typeSource.lineTypes @@ -73,7 +74,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { def "A CsvTypeSource should read and handle valid 3W Transformer type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def transformer3WTypes = typeSource.transformer3WTypes @@ -102,7 +103,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { def "A CsvTypeSource should read and handle valid bm type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def bmTypes = typeSource.bmTypes @@ -117,7 +118,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { def "A CsvTypeSource should read and handle valid chp type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new 
FileNamingStrategy())) expect: def chpTypes = typeSource.chpTypes @@ -134,7 +135,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { def "A CsvTypeSource should read and handle valid hp type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def hpTypes = typeSource.hpTypes @@ -149,7 +150,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { def "A CsvTypeSource should read and handle valid storage type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def storageTypes = typeSource.storageTypes @@ -170,7 +171,7 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { def "A CsvTypeSource should read and handle valid wec type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def wecTypes = typeSource.wecTypes @@ -187,12 +188,11 @@ class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { //check for the individual points if (wecTypes.first().cpCharacteristic.points.iterator().hasNext()) wecTypes.first().cpCharacteristic.points.iterator().next() == sptd.wecType.cpCharacteristic.points.iterator().next() - } def "A CsvTypeSource should read and handle valid ev type file as expected"() { given: - def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def typeSource = new TypeSource(new CsvDataSource(csvSep, typeFolderPath, new FileNamingStrategy())) expect: def evTypes = typeSource.evTypes diff --git 
a/src/test/groovy/edu/ie3/datamodel/io/source/csv/GridIoIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/GridIoIT.groovy new file mode 100644 index 000000000..4b8ebf9c3 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/GridIoIT.groovy @@ -0,0 +1,96 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.exceptions.FileException +import edu.ie3.datamodel.io.naming.DefaultDirectoryHierarchy +import edu.ie3.datamodel.io.naming.EntityPersistenceNamingStrategy +import edu.ie3.datamodel.io.naming.FileNamingStrategy +import edu.ie3.datamodel.io.sink.CsvFileSink +import edu.ie3.util.io.FileIOUtils +import spock.lang.Shared +import spock.lang.Specification + +import java.nio.file.Files +import java.nio.file.Path + +/** + * Testing whether PSDM CSV grids are equal when serialized and deserialized sequentially. + * Grid data should not change when written out or parsed. 
+ */ +class GridIoIT extends Specification implements CsvTestDataMeta { + + @Shared + Path tempDirectory + + @Shared + CsvFileSink sinkFlat + + @Shared + CsvFileSink sinkHierarchic + + def setupSpec() { + FileNamingStrategy hierarchicNamingStrategy = new FileNamingStrategy( + new EntityPersistenceNamingStrategy(), + new DefaultDirectoryHierarchy(Path.of("output"), "vn_simona")) + tempDirectory = Files.createTempDirectory("GridIoIT") + sinkFlat = new CsvFileSink(tempDirectory.toAbsolutePath()) + sinkHierarchic = new CsvFileSink(tempDirectory.toAbsolutePath(), hierarchicNamingStrategy, ",") + } + + def cleanupSpec() { + sinkFlat.shutdown() + sinkHierarchic.shutdown() + FileIOUtils.deleteRecursively(tempDirectory) + } + + def "Input flat JointGridContainer equals Output flat JointGridContainer."() { + + given: + // create joint grid container + def gridName = "vn_simona" + def separator = "," + def firstGridContainer = CsvJointGridContainerSource.read(gridName, separator, jointGridFolderPath, false) + + when: + // write files from joint grid container in output directory + sinkFlat.persistJointGrid(firstGridContainer) + // create second grid container from output folder + def secondGridContainer = CsvJointGridContainerSource.read(gridName, separator, tempDirectory.toAbsolutePath(), false) + + then: + // compare input and output joint grid container + firstGridContainer == secondGridContainer + } + + def "Input flat JointGridContainer equals Output hierarchic JointGridContainer."() { + given: + // create joint grid container + def gridName = "vn_simona" + def separator = "," + def firstGridContainer = CsvJointGridContainerSource.read(gridName, separator, jointGridFolderPath, false) + + when: + sinkHierarchic.persistJointGrid(firstGridContainer) + def secondGridContainer = CsvJointGridContainerSource.read(gridName, separator, tempDirectory.toAbsolutePath(), true) + + then: + // compare input and output joint grid container + firstGridContainer == secondGridContainer + } 
+ + def "CsvJointGridContainerSource throws exception if a hierarchic grid is expected but a flat grid is presented."() { + given: + def gridName = "vn_simona" + def separator = "," + + when: + CsvJointGridContainerSource.read(gridName, separator, jointGridFolderPath, true) + + then: + thrown(FileException) + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSourceCosmoIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSourceCosmoIT.groovy index 56f54b4e2..58f59e065 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSourceCosmoIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSourceCosmoIT.groovy @@ -54,7 +54,7 @@ class InfluxDbWeatherSourceCosmoIT extends Specification implements TestContaine def connector = new InfluxDbConnector(influxDbContainer.url,"test_weather", "test_scenario") then: - connector.connectionValid + connector.isConnectionValid() } def "An InfluxDbWeatherSource can read and correctly parse a single value for a specific date and coordinate"() { diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSourceIconIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSourceIconIT.groovy index b99687cf2..ef1799a52 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSourceIconIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/influxdb/InfluxDbWeatherSourceIconIT.groovy @@ -52,7 +52,7 @@ class InfluxDbWeatherSourceIconIT extends Specification implements WeatherSource def connector = new InfluxDbConnector(influxDbContainer.url, "test_weather", "test_scenario") then: - connector.connectionValid + connector.isConnectionValid() } def "An InfluxDbWeatherSource can read and correctly parse a single value for a specific date and coordinate"() { diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlIdCoordinateSourceIT.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlIdCoordinateSourceIT.groovy new file mode 100644 index 000000000..39be186ff --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlIdCoordinateSourceIT.groovy @@ -0,0 +1,190 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.sql + +import edu.ie3.datamodel.io.connectors.SqlConnector +import edu.ie3.datamodel.io.factory.timeseries.SqlIdCoordinateFactory +import edu.ie3.test.helper.TestContainerHelper +import edu.ie3.util.geo.CoordinateDistance +import edu.ie3.util.geo.GeoUtils +import org.locationtech.jts.geom.Coordinate +import org.testcontainers.containers.Container +import org.testcontainers.containers.PostgreSQLContainer +import org.testcontainers.spock.Testcontainers +import org.testcontainers.utility.DockerImageName +import org.testcontainers.utility.MountableFile +import spock.lang.Shared +import spock.lang.Specification +import tech.units.indriya.quantity.Quantities +import tech.units.indriya.unit.Units + +@Testcontainers +class SqlIdCoordinateSourceIT extends Specification implements TestContainerHelper { + + @Shared + PostgreSQLContainer postgisSQLContainer = new PostgreSQLContainer(DockerImageName.parse("postgis/postgis:14-3.3").asCompatibleSubstituteFor("postgres")) + + @Shared + SqlIdCoordinateSource source + + static String schemaName = "public" + static String coordinateTableName = "coordinates" + + def setupSpec() { + // Copy sql import script into docker + MountableFile sqlImportFile = getMountableFile("_coordinates/coordinates.sql") + postgisSQLContainer.copyFileToContainer(sqlImportFile, "/home/coordinates.sql") + // Execute import script + Container.ExecResult res = postgisSQLContainer.execInContainer("psql", "-Utest", "-f/home/coordinates.sql") + assert res.stderr.empty + + def connector = new 
SqlConnector(postgisSQLContainer.jdbcUrl, postgisSQLContainer.username, postgisSQLContainer.password) + def coordinatesFactory = new SqlIdCoordinateFactory() + source = new SqlIdCoordinateSource(connector, schemaName, coordinateTableName, coordinatesFactory) + } + + def "A SqlIdCoordinateSource can read a single coordinate"(){ + given: + def expectedValue = new Coordinate(7.438, 51.5) + + when: + def receivedValue = source.getCoordinate(67775) + + then: + def coordinate = receivedValue.get().coordinate + coordinate == expectedValue + } + + def "A SqlIdCoordinateSource will return nothing if an id is not present"(){ + given: + def receivedValue = source.getCoordinate(0) + + expect: + receivedValue.isEmpty() + } + + def "A SqlIdCoordinateSource can read a list of coordinates"(){ + given: + def expectedValues = [ + new Coordinate(7.438, 51.5), + new Coordinate(7.375, 51.5) + ] + + when: + int[] arr = new int[]{ + 67775, 531137 + } + def receivedValues = source.getCoordinates(arr) + + then: + ArrayList points = receivedValues.coordinate + + points == expectedValues + } + + def "A SqlIdCoordinateSource can return the id of a point"(){ + given: + int id = 67775 + + when: + def receivedValue = source.getId(GeoUtils.buildPoint(51.5, 7.438)) + + then: + receivedValue.get() == id + } + + def "A SqlIdCoordinateSource will return nothing if a coordinate is not present"(){ + given: + def coordinate = GeoUtils.buildPoint(0.0 ,0.0 ) + + when: + def receivedValue = source.getId(coordinate) + + then: + receivedValue.isEmpty() + } + + def "A SqlIdCoordinateSource can return all coordinates"() { + given: + def expectedValues = [ + GeoUtils.buildPoint(51.5,7.438), + GeoUtils.buildPoint(51.5,7.375), + GeoUtils.buildPoint(51.438,7.438), + GeoUtils.buildPoint(51.438,7.375) + ] + + when: + def receivedValues = source.getAllCoordinates() + + then: + + receivedValues == expectedValues + } + + def "A SqlIdCoordinateSource can return the nearest n coordinates if n coordinates are in the given 
radius"(){ + given: + def basePoint = GeoUtils.buildPoint(51.5, 7.38) + def distance = Quantities.getQuantity(200000, Units.METRE) + + when: + def actualDistances = source.getClosestCoordinates(basePoint, 3, distance) + + then: + actualDistances.size() == 3 + } + + def "A SqlIdCoordinateSource will return the nearest m coordinates if less than n coordinates are in the given radius"(){ + given: + def basePoint = GeoUtils.buildPoint(51.5, 7.38) + def distance = Quantities.getQuantity(1000, Units.METRE) + + when: + def actualDistances = source.getClosestCoordinates(basePoint, 2, distance) + + then: + actualDistances.size() == 1 + } + + def "A SqlIdCoordinateSource will return the nearest n coordinates of the nearest n neighbours if no coordinates are in the given radius" () { + given: + def basePoint = GeoUtils.buildPoint(39.617162, 1.438029) + def expectedValues = [ + GeoUtils.buildPoint(51.5,7.438), + GeoUtils.buildPoint(51.5,7.375), + GeoUtils.buildPoint(51.438,7.438), + GeoUtils.buildPoint(51.438,7.375) + ] + def distance = Quantities.getQuantity(1000, Units.METRE) + + when: + def receivedValues = source.getClosestCoordinates(basePoint, 2, distance) + + then: + for(CoordinateDistance coordinateDistance : receivedValues){ + expectedValues.contains(coordinateDistance.coordinateB) + } + } + + def "A SqlIdCoordinateSource will return the nearest n coordinates of all available coordinates if no coordinates are in the given radius and n is greater than the number of all coordinates"(){ + given: + def basePoint = GeoUtils.buildPoint(39.617162, 1.438029) + def expectedValues = [ + GeoUtils.buildPoint(51.5,7.438), + GeoUtils.buildPoint(51.5,7.375), + GeoUtils.buildPoint(51.438,7.438), + GeoUtils.buildPoint(51.438,7.375) + ] + def distance = Quantities.getQuantity(1000, Units.METRE) + + when: + def receivedValues = source.getClosestCoordinates(basePoint, 5, distance) + + then: + for(CoordinateDistance coordinateDistance : receivedValues){ + 
expectedValues.contains(coordinateDistance.coordinateB) + } + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/models/input/connector/LineInputTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/input/connector/LineInputTest.groovy index bbed867e5..1a8cdbec6 100644 --- a/src/test/groovy/edu/ie3/datamodel/models/input/connector/LineInputTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/models/input/connector/LineInputTest.groovy @@ -56,7 +56,6 @@ class LineInputTest extends Specification { "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.311111, 51.49228],[7.511111, 51.49228]]}" | _ - } def "A LineInput copy method should work as expected"() { diff --git a/src/test/groovy/edu/ie3/datamodel/models/input/container/GraphicElementsTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/input/container/GraphicElementsTest.groovy index 39ed3a122..52820eb61 100644 --- a/src/test/groovy/edu/ie3/datamodel/models/input/container/GraphicElementsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/models/input/container/GraphicElementsTest.groovy @@ -22,4 +22,24 @@ class GraphicElementsTest extends Specification { then: newlyCreatedGraphicElements == graphicElements } + + def "A GraphicElements' copy method should work as expected"() { + given: + def graphicElements = new GraphicElements( + Collections.singleton(GridTestData.nodeGraphicC), + Collections.singleton(GridTestData.lineGraphicCtoD) + ) + + def modifiedLineGraphic = GridTestData.lineGraphicCtoD.copy().uuid(UUID.randomUUID()).build() + + when: + def modifiedGraphicElements = graphicElements.copy() + .nodeGraphics(Set.of(GridTestData.nodeGraphicD)) + .lineGraphics(Set.of(modifiedLineGraphic)) + .build() + + then: + 
modifiedGraphicElements.nodeGraphics.first() == GridTestData.nodeGraphicD + modifiedGraphicElements.lineGraphics.first() == modifiedLineGraphic + } } diff --git a/src/test/groovy/edu/ie3/datamodel/models/input/container/JointGridContainerTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/input/container/JointGridContainerTest.groovy index a23f72ef3..0baa37cc0 100644 --- a/src/test/groovy/edu/ie3/datamodel/models/input/container/JointGridContainerTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/models/input/container/JointGridContainerTest.groovy @@ -5,10 +5,11 @@ */ package edu.ie3.datamodel.models.input.container -import static edu.ie3.test.common.SystemParticipantTestData.emptySystemParticipants import edu.ie3.test.common.GridTestData +import edu.ie3.test.common.SystemParticipantTestData import spock.lang.Specification +import static edu.ie3.test.common.SystemParticipantTestData.emptySystemParticipants class JointGridContainerTest extends Specification { private static final GRID_NAME = "single_grid" @@ -34,4 +35,26 @@ class JointGridContainerTest extends Specification { noExceptionThrown() jointGridContainer.subGridTopologyGraph.vertexSet().size() == 1 } + + def "A JointGridContainer's copy method should work as expected"() { + given: + def jointGridContainer = new JointGridContainer(GRID_NAME, RAW_GRID, emptySystemParticipants, GRAPHIC_ELEMENTS) + def rawGrid = new RawGridElements(List.of(GridTestData.lineAtoB, GridTestData.transformerAtoBtoC)) + def systemParticipants = new SystemParticipants(List.of(SystemParticipantTestData.bmInput)) + def graphics = new GraphicElements(Set.of(GridTestData.nodeGraphicD), Set.of(GridTestData.lineGraphicCtoD)) + + when: + def modifiedJointGridContainer = jointGridContainer.copy() + .gridName("new grid name") + .rawGrid(rawGrid) + .systemParticipants(systemParticipants) + .graphics(graphics) + .build() + + then: + modifiedJointGridContainer.gridName == "new grid name" + modifiedJointGridContainer.rawGrid == rawGrid + 
modifiedJointGridContainer.systemParticipants == systemParticipants + modifiedJointGridContainer.graphics == graphics + } } diff --git a/src/test/groovy/edu/ie3/datamodel/models/input/container/RawGridElementsTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/input/container/RawGridElementsTest.groovy index bf6b1fd7f..4af66aec1 100644 --- a/src/test/groovy/edu/ie3/datamodel/models/input/container/RawGridElementsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/models/input/container/RawGridElementsTest.groovy @@ -6,6 +6,7 @@ package edu.ie3.datamodel.models.input.container import edu.ie3.test.common.ComplexTopology +import edu.ie3.test.common.GridTestData import spock.lang.Specification @@ -21,4 +22,27 @@ class RawGridElementsTest extends Specification { then: newlyCreatedRawGrid == rawGrid } + + def "A RawGridElements' copy method should work as expected"() { + given: + def emptyRawGrid = new RawGridElements([] as Set, [] as Set, [] as Set, [] as Set, [] as Set, [] as Set) + + when: + def modifiedRawGrid = emptyRawGrid.copy() + .nodes(Set.of(GridTestData.nodeA)) + .lines(Set.of(GridTestData.lineAtoB)) + .transformers2Ws(Set.of(GridTestData.transformerBtoD)) + .transformer3Ws(Set.of(GridTestData.transformerAtoBtoC)) + .switches(Set.of(GridTestData.switchAtoB)) + .measurementUnits(Set.of(GridTestData.measurementUnitInput)) + .build() + + then: + modifiedRawGrid.nodes.first() == GridTestData.nodeA + modifiedRawGrid.lines.first() == GridTestData.lineAtoB + modifiedRawGrid.transformer2Ws.first() == GridTestData.transformerBtoD + modifiedRawGrid.transformer3Ws.first() == GridTestData.transformerAtoBtoC + modifiedRawGrid.switches.first() == GridTestData.switchAtoB + modifiedRawGrid.measurementUnits.first() == GridTestData.measurementUnitInput + } } diff --git a/src/test/groovy/edu/ie3/datamodel/models/input/container/SystemParticipantsTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/input/container/SystemParticipantsTest.groovy index 661ff9fa7..d24239bbd 
100644 --- a/src/test/groovy/edu/ie3/datamodel/models/input/container/SystemParticipantsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/models/input/container/SystemParticipantsTest.groovy @@ -32,4 +32,61 @@ class SystemParticipantsTest extends Specification { then: newlyCreatedSystemParticipants == systemParticipants } + + def "A SystemParticipants' copy method should work as expected"() { + given: + def systemParticipants = new SystemParticipants( + Collections.singleton(SystemParticipantTestData.bmInput), + Collections.singleton(SystemParticipantTestData.chpInput), + Collections.singleton(SystemParticipantTestData.evcsInput), + Collections.singleton(SystemParticipantTestData.evInput), + Collections.singleton(SystemParticipantTestData.fixedFeedInInput), + Collections.singleton(SystemParticipantTestData.hpInput), + Collections.singleton(SystemParticipantTestData.loadInput), + Collections.singleton(SystemParticipantTestData.pvInput), + Collections.singleton(SystemParticipantTestData.storageInput), + Collections.singleton(SystemParticipantTestData.wecInput), + Collections.singleton(SystemParticipantTestData.emInput) + ) + + def modifiedBmInput = SystemParticipantTestData.bmInput.copy().id("modified").build() + def modifiedChpInput = SystemParticipantTestData.chpInput.copy().id("modified").build() + def modifiedEvCSInput = SystemParticipantTestData.evcsInput.copy().id("modified").build() + def modifiedEvInput = SystemParticipantTestData.evInput.copy().id("modified").build() + def modifiedFixedFeedInInput = SystemParticipantTestData.fixedFeedInInput.copy().id("modified").build() + def modifiedHpInput = SystemParticipantTestData.hpInput.copy().id("modified").build() + def modifiedLoadInput = SystemParticipantTestData.loadInput.copy().id("modified").build() + def modifiedPvInput = SystemParticipantTestData.pvInput.copy().id("modified").build() + def modifiedStorageInput = SystemParticipantTestData.storageInput.copy().id("modified").build() + def modifiedWecInput = 
SystemParticipantTestData.wecInput.copy().id("modified").build() + def modifiedEmInput = SystemParticipantTestData.emInput.copy().id("modified").build() + + when: + def modifiedSystemParticipants = systemParticipants.copy() + .bmPlants(Set.of(modifiedBmInput)) + .chpPlants(Set.of(modifiedChpInput)) + .evCS(Set.of(modifiedEvCSInput)) + .evs(Set.of(modifiedEvInput)) + .fixedFeedIn(Set.of(modifiedFixedFeedInInput)) + .heatPumps(Set.of(modifiedHpInput)) + .loads(Set.of(modifiedLoadInput)) + .pvPlants(Set.of(modifiedPvInput)) + .storages(Set.of(modifiedStorageInput)) + .wecPlants(Set.of(modifiedWecInput)) + .emSystems(Set.of(modifiedEmInput)) + .build() + + then: + modifiedSystemParticipants.bmPlants.first() == modifiedBmInput + modifiedSystemParticipants.chpPlants.first() == modifiedChpInput + modifiedSystemParticipants.evCS.first() == modifiedEvCSInput + modifiedSystemParticipants.evs.first() == modifiedEvInput + modifiedSystemParticipants.fixedFeedIns.first() == modifiedFixedFeedInInput + modifiedSystemParticipants.heatPumps.first() == modifiedHpInput + modifiedSystemParticipants.loads.first() == modifiedLoadInput + modifiedSystemParticipants.pvPlants.first() == modifiedPvInput + modifiedSystemParticipants.storages.first() == modifiedStorageInput + modifiedSystemParticipants.wecPlants.first() == modifiedWecInput + modifiedSystemParticipants.emSystems.first() == modifiedEmInput + } } diff --git a/src/test/groovy/edu/ie3/datamodel/models/input/container/ThermalGridTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/input/container/ThermalGridTest.groovy index 209539bae..fb8485acb 100644 --- a/src/test/groovy/edu/ie3/datamodel/models/input/container/ThermalGridTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/models/input/container/ThermalGridTest.groovy @@ -31,4 +31,25 @@ class ThermalGridTest extends Specification { actualAllEntities.containsAll(thermalHouses) actualAllEntities.containsAll(thermalStorages) } + + def "A ThermalGrid's copy method should work as 
expected"() { + given: + def thermalBus = Mock(ThermalBusInput) + def thermalHouses = [] + def thermalStorages = [] + def thermalGrid = new ThermalGrid(thermalBus, thermalHouses, thermalStorages) + + def modifiedHouses = [Mock(ThermalHouseInput)] + def modifiedStorages = [Mock(CylindricalStorageInput)] + + when: + def modifiedThermalGrid = thermalGrid.copy() + .houses(modifiedHouses as Set) + .storages(modifiedStorages as Set) + .build() + + then: + modifiedThermalGrid.houses().first() == modifiedHouses.get(0) + modifiedThermalGrid.storages().first() == modifiedStorages.get(0) + } } diff --git a/src/test/groovy/edu/ie3/datamodel/models/input/container/ThermalUnitsTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/input/container/ThermalUnitsTest.groovy index 914c4478a..adaec4f55 100644 --- a/src/test/groovy/edu/ie3/datamodel/models/input/container/ThermalUnitsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/models/input/container/ThermalUnitsTest.groovy @@ -28,4 +28,24 @@ class ThermalUnitsTest extends Specification { actualAllEntities.containsAll(thermalHouses) actualAllEntities.containsAll(thermalStorages) } + + def "A ThermalUnits' copy method should work as expected"() { + given: + def thermalHouses = [] + def thermalStorages = [] + def thermalUnits = new ThermalUnits(thermalHouses, thermalStorages) + + def modifiedHouses = [Mock(ThermalHouseInput)] + def modifiedStorages = [Mock(CylindricalStorageInput)] + + when: + def modifiedThermalUnits = thermalUnits.copy() + .houses(modifiedHouses as Set) + .storages(modifiedStorages as Set) + .build() + + then: + modifiedThermalUnits.houses().first() == modifiedHouses.get(0) + modifiedThermalUnits.storages().first() == modifiedStorages.get(0) + } } diff --git a/src/test/groovy/edu/ie3/datamodel/models/input/graphics/GraphicInputTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/input/graphics/GraphicInputTest.groovy index 65fb48587..f7225fcbc 100644 --- 
a/src/test/groovy/edu/ie3/datamodel/models/input/graphics/GraphicInputTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/models/input/graphics/GraphicInputTest.groovy @@ -17,6 +17,11 @@ class GraphicInputTest extends Specification { DummyGraphicObject(UUID uuid, String graphicLayer, LineString path) { super(uuid, graphicLayer, path) } + + @Override + GraphicInputCopyBuilder copy() { + throw new UnsupportedOperationException("This is a dummy class") + } } @@ -36,6 +41,5 @@ class GraphicInputTest extends Specification { "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.311111, 51.49228],[7.511111, 51.49228]]}" | _ - } } diff --git a/src/test/groovy/edu/ie3/datamodel/utils/ContainerNodeUpdateUtilTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/ContainerNodeUpdateUtilTest.groovy index 93ecd8cef..4e8421f73 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/ContainerNodeUpdateUtilTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/ContainerNodeUpdateUtilTest.groovy @@ -56,7 +56,6 @@ class ContainerNodeUpdateUtilTest extends Specification { SampleJointGrid.nodeA | SampleJointGrid.nodeA.copy().geoPosition((Point) geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [16.592276813887139, 49.37770599548332] }")).build() || (Point) geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [16.592276813887139, 49.37770599548332] }") SampleJointGrid.nodeD | SampleJointGrid.nodeD.copy().geoPosition((Point) geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [16.592276813887139, 50.37770599548332] }")).build() || (Point) geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [16.592276813887139, 50.37770599548332] }") SampleJointGrid.nodeG | 
SampleJointGrid.nodeG.copy().geoPosition((Point) geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [16.592276813887139, 25.37770599548332] }")).build() || (Point) geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [16.592276813887139, 25.37770599548332] }") - } def "A ContainerUpdateUtil should update chained 2w transformers correctly when multiple node updates are provided"() { @@ -102,7 +101,6 @@ class ContainerNodeUpdateUtilTest extends Specification { assert updatedTransformer2WInputs.stream().flatMap({ trafo2w -> Stream.of(trafo2w.getNodeA(), trafo2w.getNodeB()) }) .collect(Collectors.toSet()) == updatedOldToNewNodes.values() as Set } - } def "A ContainerUpdateUtil should update chained 2w and 3w transformers correctly when only one node update is provided"() { @@ -219,7 +217,5 @@ class ContainerNodeUpdateUtilTest extends Specification { resultingGrid.rawGrid.lines.find {line -> line.getId().equals("lineDtoF")}.nodeA.geoPosition == alteredGeoPos resultingGrid.rawGrid.lines.find {line -> line.getId().equals("lineDtoF")}.geoPosition.coordinates.contains(alteredGeoPos.coordinate) - } - } diff --git a/src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilsTest.groovy index d1a4e3eb3..9d1c8f96c 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilsTest.groovy @@ -376,10 +376,8 @@ class ContainerUtilsTest extends Specification { } assert systemParticipants == unmodifiedSubGrid.systemParticipants - } } - } def "The container util builds the correct sub grid dependency graph"() { diff --git a/src/test/groovy/edu/ie3/datamodel/utils/FileUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/FileUtilsTest.groovy new file mode 100644 index 000000000..36741b050 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/utils/FileUtilsTest.groovy @@ -0,0 +1,65 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.utils + +import edu.ie3.datamodel.io.IoUtil +import spock.lang.Shared +import spock.lang.Specification + +import java.nio.file.Path + +class FileUtilsTest extends Specification { + @Shared + String fileName + + @Shared + Path directory + + def setupSpec() { + fileName = "node_input.csv" + directory = Path.of("test", "grid") + } + + def "A file definition is et up correctly, if an empty path is given" () { + when: + def file = FileUtils.of("name", path) + + then: + file == expectedPath + + where: + path || expectedPath + IoUtil.pathOption("") || Path.of("name") + } + + def "A file definition of a csv file is set up correctly, if the directory path has corrupt file separator" () { + when: + def file = FileUtils.ofCsv(fileName, manipulatedDirectory) + + then: + file.with { + assert it.fileName == Path.of(this.fileName) + assert it == this.directory.resolve(this.fileName) + } + + where: + manipulatedDirectory || expected + Path.of(this.directory.toString(), "/") || this.directory + Path.of(this.directory.toString().replaceAll("[\\\\/]", File.separator == "/" ? "\\\\" : "/")) || this.directory + } + + def "A file definition of a csv file is set up correctly, if the directory path is null" () { + when: + def file = FileUtils.ofCsv(fileName, null) + + then: + file.with { + assert it.fileName == Path.of(this.fileName) + assert it.relativize(it.fileName) == Path.of("") + assert it.parent == null + } + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/utils/TryTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/TryTest.groovy new file mode 100644 index 000000000..4a0ff0762 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/utils/TryTest.groovy @@ -0,0 +1,345 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.utils + +import edu.ie3.datamodel.exceptions.FailureException +import edu.ie3.datamodel.exceptions.SourceException +import edu.ie3.datamodel.exceptions.TryException +import spock.lang.Specification + +class TryTest extends Specification { + + def "A method can be applied to a try object"() { + when: + Try actual = Try.of(() -> "success", Exception) + + then: + actual.success + actual.data.get() == "success" + actual.orThrow == "success" + actual.exception == Optional.empty() + } + + def "A failing method can be applied to a try object"() { + when: + Try actual = Try.of(() -> { + throw new SourceException("Exception thrown.") + }, SourceException) + + then: + actual.failure + actual.data == Optional.empty() + actual.exception.get().class == SourceException + actual.exception.get().message == "Exception thrown." 
+ } + + def "A failure is returned if an expected exception type is thrown when using #of()"() { + when: + def exception = new SourceException("source exception") + Try actual = Try.of(() -> { + throw exception + }, SourceException) + + then: + actual.failure + actual.exception.get() == exception + } + + def "A TryException is thrown if an unexpected exception type is thrown when using #of()"() { + when: + Try.of(() -> { + throw new SourceException("source exception") + }, FailureException) + + then: + Exception ex = thrown() + ex.class == TryException + ex.message == "Wrongly caught exception: " + Throwable cause = ex.cause + cause.class == SourceException + cause.message == "source exception" + } + + def "A failure is returned when using Failure#ofVoid() with an exception"() { + when: + def exception = new SourceException("source exception") + Try actual = Try.Failure.ofVoid(exception) + + then: + actual.failure + actual.exception.get() == exception + } + + def "A failure is returned when using Failure#of() with an exception"() { + when: + def exception = new SourceException("source exception") + Try actual = Try.Failure.of(exception) + + then: + actual.failure + actual.exception.get() == exception + } + + def "A failure is returned if an expected exception type is thrown when using Try#ofVoid()"() { + when: + def exception = new SourceException("source exception") + Try actual = Try.ofVoid(() -> { + throw exception + }, SourceException) + + then: + actual.failure + actual.exception.get() == exception + } + + def "A TryException is thrown if an unexpected exception type is thrown when using Try#ofVoid()"() { + when: + Try.ofVoid(() -> { + throw new SourceException("source exception") + }, FailureException) + + then: + Exception ex = thrown() + ex.class == TryException + ex.message == "Wrongly caught exception: " + Throwable cause = ex.cause + cause.class == SourceException + cause.message == "source exception" + } + + def "A Try object can be creates by a boolean 
and an exception"() { + when: + def ex = new FailureException("failure") + def actual = Try.ofVoid(bool, () -> ex) + + then: + actual.failure == expected + + if (expected) { + actual.exception.get() == ex + } + + where: + bool || expected + true || true + false || false + } + + def "A list of Tries is returned when applying a multiple VoidSupplier to Try#ofVoid()"() { + given: + Try.VoidSupplier one = () -> { + throw new FailureException("failure 1") + } + Try.VoidSupplier two = () -> { + throw new FailureException("failure 2") + } + + when: + List> failures = Try.ofVoid(FailureException, one, two) + + then: + failures.size() == 2 + failures.every { + it.failure + } + } + + def "A TryException is thrown if an unexpected exception type is thrown when using Try#ofVoid() with multiple VoidSuppliers"() { + given: + Try.VoidSupplier one = () -> { + throw new FailureException("failure") + } + Try.VoidSupplier two = () -> { + throw new SourceException("source exception") + } + + when: + Try.ofVoid(FailureException, one, two) + + then: + Exception ex = thrown() + ex.class == TryException + Throwable cause = ex.cause + cause.class == SourceException + cause.message == "source exception" + } + + def "A void method can be applied to a try object"() { + when: + Try actual = Try.ofVoid(() -> null, Exception) + + then: + actual.success + ((Try.Success) actual).empty + actual.data.empty + } + + def "A success object can be resolved with get method"() { + given: + Try success = new Try.Success<>("success") + + when: + String str = success.get() + + then: + str == "success" + } + + def "A failure object can be resolved with get method"() { + given: + Try failure = new Try.Failure<>(new Exception("failure")) + + when: + Exception ex = failure.get() + + then: + ex.message == "failure" + } + + def "An empty Success should work as expected"() { + given: + Try empty = Try.Success.empty() as Try + + expect: + empty.success + empty.data == Optional.empty() + } + + def "A scan for 
exceptions should work as expected when failures are included"() { + given: + Set> set = Set.of( + new Try.Success<>("one"), + new Try.Failure<>(new Exception("exception")), + new Try.Success<>("two"), + new Try.Success<>("three") + ) + + when: + Try, Exception> scan = Try.scanCollection(set, String) + + then: + scan.failure + scan.exception.get().message == "1 exception(s) occurred within \"String\" data, one is: java.lang.Exception: exception" + } + + def "A scan for exceptions should work as expected when no failures are included"() { + given: + Set> set = Set.of( + new Try.Success<>("one"), + new Try.Success<>("two"), + new Try.Success<>("three") + ) + + when: + Try, Exception> scan = Try.scanCollection(set, String) + + then: + scan.success + scan.data.get().size() == 3 + } + + def "The getOrThrow method should work as expected"() { + given: + Try failure = new Try.Failure<>(new SourceException("source exception")) + + when: + failure.orThrow + + then: + Exception ex = thrown() + ex.class == SourceException + ex.message == "source exception" + } + + def "A Try objects transformation should work as correctly for successes"() { + given: + Try success = new Try.Success<>("5") + SourceException exc = new SourceException("source exception") + + when: + Try transformS = success.transformS(str -> Integer.parseInt(str) ) + Try map = success.map(str -> Integer.parseInt(str) ) + Try transformF = success.transformF(ex -> new Exception(ex) ) + Try transform = success.transform(str -> Integer.parseInt(str), ex -> new Exception(ex) ) + Try flatMapS = success.flatMap(str -> new Try.Success(Integer.parseInt(str)) ) + Try flatMapF = success.flatMap(str -> new Try.Failure(exc) ) + + then: + transformS.success + map.success + transformF.success + transform.success + flatMapS.success + flatMapF.failure + + transformS.data.get() == 5 + map.data.get() == 5 + transformF.data.get() == "5" + transform.data.get() == 5 + flatMapS.data.get() == 5 + flatMapF.exception.get() == exc + } + + 
def "A Try objects transformation should work as correctly for failures"() { + given: + Try failure = new Try.Failure<>(new SourceException("")) + + when: + Try transformS = failure.transformS(str -> Integer.parseInt(str) ) + Try map = failure.map(str -> Integer.parseInt(str) ) + Try transformF = failure.transformF(ex -> new Exception(ex) ) + Try transform = failure.transform(str -> Integer.parseInt(str), ex -> new Exception(ex) ) + Try flatMapS = failure.flatMap(str -> new Try.Success(Integer.parseInt(str)) ) + Try flatMapF = failure.flatMap(str -> new Try.Failure(new SourceException("not returned")) ) + + then: + transformS.failure + map.failure + transformF.failure + transform.failure + flatMapS.failure + flatMapF.failure + + transformS.exception.get().class == SourceException + map.exception.get().class == SourceException + transformF.exception.get().class == Exception + transform.exception.get().class == Exception + flatMapS.exception.get() == failure.get() + flatMapF.exception.get() == failure.get() + } + + def "All exceptions of a collection of try objects should be returned"() { + given: + List> tries = List.of( + new Try.Success<>("one"), + new Try.Failure<>(new SourceException("source exception")), + new Try.Failure<>(new UnsupportedOperationException("unsupported operation exception")), + new Try.Success<>("two"), + new Try.Failure<>(new SourceException("source exception 2")) + ) + + when: + List exceptions = Try.getExceptions(tries) + + then: + exceptions.size() == 3 + + exceptions.get(0).with { + assert it.class == SourceException + assert it.message == "source exception" + } + + exceptions.get(1).with { + assert it.class == UnsupportedOperationException + assert it.message == "unsupported operation exception" + } + + exceptions.get(2).with { + assert it.class == SourceException + assert it.message == "source exception 2" + } + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/ConnectorValidationUtilsTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/utils/validation/ConnectorValidationUtilsTest.groovy index 2ef09e125..97a2ff4a8 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/ConnectorValidationUtilsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/ConnectorValidationUtilsTest.groovy @@ -6,30 +6,25 @@ package edu.ie3.datamodel.utils.validation import static edu.ie3.datamodel.models.StandardUnits.* -import static edu.ie3.util.quantities.PowerSystemUnits.* +import static tech.units.indriya.unit.Units.METRE +import edu.ie3.datamodel.exceptions.InvalidEntityException import edu.ie3.datamodel.models.input.connector.LineInput import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput import edu.ie3.datamodel.models.input.system.characteristic.OlmCharacteristicInput import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils -import org.locationtech.jts.geom.LineString -import tech.units.indriya.ComparableQuantity - -import javax.measure.quantity.Angle -import javax.measure.quantity.Dimensionless -import javax.measure.quantity.ElectricConductance -import javax.measure.quantity.ElectricPotential -import javax.measure.quantity.ElectricResistance -import javax.measure.quantity.Power - -import edu.ie3.datamodel.exceptions.InvalidEntityException +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.GridTestData import edu.ie3.util.geo.GeoUtils import org.locationtech.jts.geom.Coordinate +import org.locationtech.jts.geom.LineString import spock.lang.Specification +import tech.units.indriya.ComparableQuantity import tech.units.indriya.quantity.Quantities +import javax.measure.quantity.* + class ConnectorValidationUtilsTest extends Specification { def "Instantiating a ConnectorValidationUtil leads to an exception"() { @@ -69,22 +64,20 @@ class ConnectorValidationUtilsTest extends Specification { def "ConnectorValidationUtils.checkLine() 
recognizes all potential errors for a line"() { when: - ConnectorValidationUtils.check(invalidLine) + List> exceptions = ConnectorValidationUtils.check(invalidLine).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidLine || expectedException - GridTestData.lineFtoG.copy().nodeA(GridTestData.nodeG).build() || new InvalidEntityException("LineInput connects the same node, but shouldn't", invalidLine) - GridTestData.lineFtoG.copy().nodeA(GridTestData.nodeF.copy().subnet(5).build()).build() || new InvalidEntityException("LineInput connects different subnets, but shouldn't", invalidLine) - GridTestData.lineFtoG.copy().nodeA(GridTestData.nodeF.copy().voltLvl(GermanVoltageLevelUtils.MV_10KV).build()).build() || new InvalidEntityException("LineInput connects different voltage levels, but shouldn't", invalidLine) - GridTestData.lineFtoG.copy().length(Quantities.getQuantity(0d, METRE)).build() || new InvalidEntityException("The following quantities have to be positive: 0.0 km", invalidLine) - GridTestData.lineFtoG.copy().nodeA(GridTestData.nodeF.copy().geoPosition(testCoordinate).build()).build() || new InvalidEntityException("Coordinates of start and end point do not match coordinates of connected nodes", invalidLine) - GridTestData.lineFtoG.copy().nodeB(GridTestData.nodeG.copy().geoPosition(testCoordinate).build()).build() || new InvalidEntityException("Coordinates of start and end point do not match coordinates of connected nodes", invalidLine) - invalidLineLengthNotMatchingCoordinateDistances || new InvalidEntityException("Line length does not equal calculated distances between points building the line", invalidLine) + invalidLine || expectedSize || expectedException + GridTestData.lineFtoG.copy().nodeA(GridTestData.nodeG).build() || 1 || new 
InvalidEntityException("LineInput connects the same node, but shouldn't", invalidLine) + GridTestData.lineFtoG.copy().nodeA(GridTestData.nodeF.copy().subnet(5).build()).build() || 1 || new InvalidEntityException("LineInput connects different subnets, but shouldn't", invalidLine) + GridTestData.lineFtoG.copy().nodeA(GridTestData.nodeF.copy().voltLvl(GermanVoltageLevelUtils.MV_10KV).build()).build() || 1 || new InvalidEntityException("LineInput connects different voltage levels, but shouldn't", invalidLine) + GridTestData.lineFtoG.copy().length(Quantities.getQuantity(0d, METRE)).build() || 1 || new InvalidEntityException("The following quantities have to be positive: 0.0 km", invalidLine) } def "Smoke Test: Correct line type throws no exception"() { @@ -113,19 +106,20 @@ class ConnectorValidationUtilsTest extends Specification { def "ConnectorValidationUtils.checkTransformer2W recognizes all potential errors for a transformer2W"() { when: - ConnectorValidationUtils.check(invalidTransformer2W) + List> exceptions = ConnectorValidationUtils.check(invalidTransformer2W).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidTransformer2W || expectedException - GridTestData.transformerBtoD.copy().tapPos(100).build() || new InvalidEntityException("Tap position of Transformer2WInput is outside of bounds", invalidTransformer2W) - GridTestData.transformerBtoD.copy().nodeB(GridTestData.nodeD.copy().voltLvl(GermanVoltageLevelUtils.HV).build()).build() || new InvalidEntityException("Transformer2WInput connects the same voltage level, but shouldn't", invalidTransformer2W) - GridTestData.transformerBtoD.copy().nodeB(GridTestData.nodeD.copy().subnet(2).build()).build() || new InvalidEntityException("Transformer2WInput connects the same subnet, but shouldn't", invalidTransformer2W) - 
GridTestData.transformerBtoD.copy().nodeB(GridTestData.nodeD.copy().voltLvl(GermanVoltageLevelUtils.MV_30KV).build()).build() || new InvalidEntityException("Rated voltages of Transformer2WInput do not equal voltage levels at the nodes", invalidTransformer2W) + invalidTransformer2W || expectedSize || expectedException + GridTestData.transformerBtoD.copy().tapPos(100).build() || 1 || new InvalidEntityException("Tap position of Transformer2WInput is outside of bounds", invalidTransformer2W) + GridTestData.transformerBtoD.copy().nodeB(GridTestData.nodeD.copy().voltLvl(GermanVoltageLevelUtils.HV).build()).build() || 2 || new InvalidEntityException("Transformer2WInput connects the same voltage level, but shouldn't", invalidTransformer2W) + GridTestData.transformerBtoD.copy().nodeB(GridTestData.nodeD.copy().subnet(2).build()).build() || 1 || new InvalidEntityException("Transformer2WInput connects the same subnet, but shouldn't", invalidTransformer2W) + GridTestData.transformerBtoD.copy().nodeB(GridTestData.nodeD.copy().voltLvl(GermanVoltageLevelUtils.MV_30KV).build()).build() || 1 || new InvalidEntityException("Rated voltages of Transformer2WInput do not equal voltage levels at the nodes", invalidTransformer2W) } def "Smoke Test: Correct transformer2W type throws no exception"() { @@ -162,8 +156,7 @@ class ConnectorValidationUtilsTest extends Specification { then: Exception ex = thrown() - ex.class == expectedException.class - ex.message == expectedException.message + ex.message.contains(expectedException.message) where: invalidTransformer2WType || expectedException @@ -185,19 +178,19 @@ class ConnectorValidationUtilsTest extends Specification { def "ConnectorValidationUtils.checkTransformer3W recognizes all potential errors for a transformer3W"() { when: - ConnectorValidationUtils.check(invalidTransformer3W) + List> exceptions = ConnectorValidationUtils.check(invalidTransformer3W).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() - ex.class == 
expectedException.class + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.message == expectedException.message where: - invalidTransformer3W || expectedException - GridTestData.transformerAtoBtoC.copy().tapPos(100).build() || new InvalidEntityException("Tap position of Transformer3WInput is outside of bounds", invalidTransformer3W) - GridTestData.transformerAtoBtoC.copy().nodeA(GridTestData.nodeA.copy().voltLvl(GermanVoltageLevelUtils.HV).build()).build() || new InvalidEntityException("Transformer connects nodes of the same voltage level", invalidTransformer3W) - GridTestData.transformerAtoBtoC.copy().nodeA(GridTestData.nodeA.copy().subnet(2).build()).build() || new InvalidEntityException("Transformer connects nodes in the same subnet", invalidTransformer3W) - GridTestData.transformerAtoBtoC.copy().nodeA(GridTestData.nodeA.copy().voltLvl(GermanVoltageLevelUtils.MV_30KV).build()).build() || new InvalidEntityException("Rated voltages of Transformer3WInput do not equal voltage levels at the nodes", invalidTransformer3W) + invalidTransformer3W || expectedSize || expectedException + GridTestData.transformerAtoBtoC.copy().tapPos(100).build() || 1 || new InvalidEntityException("Tap position of Transformer3WInput is outside of bounds", invalidTransformer3W) + GridTestData.transformerAtoBtoC.copy().nodeA(GridTestData.nodeA.copy().voltLvl(GermanVoltageLevelUtils.HV).build()).build() || 2 || new InvalidEntityException("Transformer connects nodes of the same voltage level", invalidTransformer3W) + GridTestData.transformerAtoBtoC.copy().nodeA(GridTestData.nodeA.copy().subnet(2).build()).build() || 1 || new InvalidEntityException("Transformer connects nodes in the same subnet", invalidTransformer3W) + GridTestData.transformerAtoBtoC.copy().nodeA(GridTestData.nodeA.copy().voltLvl(GermanVoltageLevelUtils.MV_30KV).build()).build() || 1 || new InvalidEntityException("Rated voltages of Transformer3WInput do not equal voltage levels at the 
nodes", invalidTransformer3W) } def "Smoke Test: Correct transformer3W type throws no exception"() { @@ -229,8 +222,7 @@ class ConnectorValidationUtilsTest extends Specification { then: Exception ex = thrown() - ex.class == expectedException.class - ex.message == expectedException.message + ex.message.contains(expectedException.message) where: invalidTransformer3WType || expectedException @@ -252,15 +244,15 @@ class ConnectorValidationUtilsTest extends Specification { def "ConnectorValidationUtils.checkSwitch recognizes all potential errors for a switch"() { when: - ConnectorValidationUtils.check(invalidSwitch) + List> exceptions = ConnectorValidationUtils.check(invalidSwitch).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() - ex.class == expectedException.class + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.message == expectedException.message where: - invalidSwitch || expectedException - GridTestData.switchAtoB || new InvalidEntityException("Switch connects two different voltage levels", invalidSwitch) + invalidSwitch || expectedSize || expectedException + GridTestData.switchAtoB || 1 || new InvalidEntityException("Switch connects two different voltage levels", invalidSwitch) } } diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/InvalidAssetInput.groovy b/src/test/groovy/edu/ie3/datamodel/utils/validation/DummyAssetInput.groovy similarity index 58% rename from src/test/groovy/edu/ie3/datamodel/utils/validation/InvalidAssetInput.groovy rename to src/test/groovy/edu/ie3/datamodel/utils/validation/DummyAssetInput.groovy index 085dda21e..899c6279f 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/InvalidAssetInput.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/DummyAssetInput.groovy @@ -1,5 +1,5 @@ /* - * © 2021. TU Dortmund University, + * © 2023. 
TU Dortmund University, * Institute of Energy Systems, Energy Efficiency and Energy Economics, * Research group Distribution grid planning and operation */ @@ -9,10 +9,17 @@ import edu.ie3.datamodel.models.input.AssetInput import java.time.ZonedDateTime -class InvalidAssetInput extends AssetInput { +class DummyAssetInput extends AssetInput { + DummyAssetInput(String id) { + super(UUID.randomUUID(), id) + } + + static DummyAssetInput valid(String id) { + return new DummyAssetInput(id) + } - InvalidAssetInput() { - super(UUID.randomUUID(), "invalid_asset") + static DummyAssetInput invalid() { + return new DummyAssetInput("invalid_asset") } @Override @@ -21,7 +28,7 @@ class InvalidAssetInput extends AssetInput { } @Override - UniqueEntityBuilder copy() { + AssetInputCopyBuilder copy() { throw new UnsupportedOperationException("This is a dummy class") } } diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/GraphicValidationUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/validation/GraphicValidationUtilsTest.groovy index 19a5264d7..aefd2825b 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/GraphicValidationUtilsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/GraphicValidationUtilsTest.groovy @@ -6,6 +6,7 @@ package edu.ie3.datamodel.utils.validation import edu.ie3.datamodel.exceptions.InvalidEntityException +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.GridTestData import spock.lang.Specification @@ -29,43 +30,46 @@ class GraphicValidationUtilsTest extends Specification { def "GraphicValidationUtils.check() recognizes all potential errors for a graphic input"() { when: - GraphicValidationUtils.check(invalidGraphicInput) + List> exceptions = GraphicValidationUtils.check(invalidGraphicInput).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class 
ex.message == expectedException.message where: - invalidGraphicInput || expectedException - GridTestData.lineGraphicCtoD.copy().graphicLayer(null).build() || new InvalidEntityException("Graphic Layer of graphic element is not defined", invalidGraphicInput) + invalidGraphicInput || expectedSize || expectedException + GridTestData.lineGraphicCtoD.copy().graphicLayer(null).build() || 1 || new InvalidEntityException("Graphic Layer of graphic element is not defined", invalidGraphicInput) } def "GraphicValidationUtils.checkLineGraphicInput() recognizes all potential errors for a line graphic input"() { when: - GraphicValidationUtils.check(invalidLineGraphicInput) + List> exceptions = GraphicValidationUtils.check(invalidLineGraphicInput).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidLineGraphicInput || expectedException - GridTestData.lineGraphicCtoD.copy().path(null).build() || new InvalidEntityException("Path of line graphic element is not defined", invalidLineGraphicInput) + invalidLineGraphicInput || expectedSize || expectedException + GridTestData.lineGraphicCtoD.copy().path(null).build() || 1 || new InvalidEntityException("Path of line graphic element is not defined", invalidLineGraphicInput) } def "GraphicValidationUtils.checkNodeGraphicInput() recognizes all potential errors for a line graphic input"() { when: - GraphicValidationUtils.check(invalidNodeGraphicInput) + List> exceptions = GraphicValidationUtils.check(invalidNodeGraphicInput).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidNodeGraphicInput || expectedException - 
GridTestData.nodeGraphicC.copy().point(null).build() || new InvalidEntityException("Point of node graphic is not defined", invalidNodeGraphicInput) + invalidNodeGraphicInput || expectedSize || expectedException + GridTestData.nodeGraphicC.copy().point(null).build() || 1 || new InvalidEntityException("Point of node graphic is not defined", invalidNodeGraphicInput) } } diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/InvalidSystemParticipantInput.groovy b/src/test/groovy/edu/ie3/datamodel/utils/validation/InvalidSystemParticipantInput.groovy index 737c40d38..9aee7acf6 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/InvalidSystemParticipantInput.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/InvalidSystemParticipantInput.groovy @@ -22,7 +22,7 @@ class InvalidSystemParticipantInput extends SystemParticipantInput { } @Override - SystemParticipantInputCopyBuilder copy() { + SystemParticipantInputCopyBuilder copy() { throw new UnsupportedOperationException("This is a dummy class") } } diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/MeasurementUnitValidationUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/validation/MeasurementUnitValidationUtilsTest.groovy index 8daa30362..39ea3ec89 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/MeasurementUnitValidationUtilsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/MeasurementUnitValidationUtilsTest.groovy @@ -6,6 +6,8 @@ package edu.ie3.datamodel.utils.validation import edu.ie3.datamodel.exceptions.UnsafeEntityException +import edu.ie3.datamodel.exceptions.ValidationException +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.GridTestData import spock.lang.Specification @@ -24,10 +26,11 @@ class MeasurementUnitValidationUtilsTest extends Specification { def "MeasurementUnitValidationUtils.check() recognizes all potential errors for a measurement unit"() { when: - 
MeasurementUnitValidationUtils.check(invalidMeasurementUnit) + Try exception = MeasurementUnitValidationUtils.check(invalidMeasurementUnit) then: - Exception ex = thrown() + exception.failure + Exception ex = exception.exception.get() ex.class == expectedException.class ex.message == expectedException.message diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/NodeValidationUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/validation/NodeValidationUtilsTest.groovy index f77752eb3..f17b2ae3e 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/NodeValidationUtilsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/NodeValidationUtilsTest.groovy @@ -8,12 +8,13 @@ package edu.ie3.datamodel.utils.validation import static edu.ie3.util.quantities.PowerSystemUnits.KILOVOLT import static edu.ie3.util.quantities.PowerSystemUnits.PU +import edu.ie3.datamodel.exceptions.InvalidEntityException import edu.ie3.datamodel.exceptions.UnsafeEntityException +import edu.ie3.datamodel.exceptions.ValidationException import edu.ie3.datamodel.models.voltagelevels.CommonVoltageLevel -import edu.ie3.util.interval.RightOpenInterval - -import edu.ie3.datamodel.exceptions.InvalidEntityException +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.GridTestData +import edu.ie3.util.interval.RightOpenInterval import spock.lang.Specification import tech.units.indriya.quantity.Quantities @@ -24,39 +25,40 @@ class NodeValidationUtilsTest extends Specification { def node = GridTestData.nodeA when: - NodeValidationUtils.check(node) + List> tries = NodeValidationUtils.check(node) then: - noExceptionThrown() + tries.every { it.success } } def "The check method recognizes all potential errors for a node"() { when: - NodeValidationUtils.check(invalidNode) + List> exceptions = NodeValidationUtils.check(invalidNode).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = 
exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidNode || expectedException - GridTestData.nodeA.copy().voltLvl(null).build() || new InvalidEntityException("Expected a voltage level, but got nothing. :-(", new NullPointerException()) + invalidNode || expectedSize || expectedException + GridTestData.nodeA.copy().voltLvl(null).build() || 1 || new InvalidEntityException("Validation not possible because received object was null. Expected a voltage level, but got nothing. :-(", new NullPointerException()) GridTestData.nodeA.copy().voltLvl(new CommonVoltageLevel( "null", null, new HashSet<>(Arrays.asList("null")), new RightOpenInterval<>( - Quantities.getQuantity(380d, KILOVOLT), Quantities.getQuantity(560d, KILOVOLT)))).build() || new InvalidEntityException("Node has invalid voltage level", invalidNode) + Quantities.getQuantity(380d, KILOVOLT), Quantities.getQuantity(560d, KILOVOLT)))).build() || 1 || new InvalidEntityException("Node has invalid voltage level", invalidNode) GridTestData.nodeA.copy().voltLvl(new CommonVoltageLevel( "zero volt", Quantities.getQuantity(0d, KILOVOLT), new HashSet<>(Arrays.asList("zero volt")), new RightOpenInterval<>( - Quantities.getQuantity(380d, KILOVOLT), Quantities.getQuantity(560d, KILOVOLT)))).build() || new InvalidEntityException("Node has invalid voltage level", invalidNode) - GridTestData.nodeA.copy().subnet(0).build() || new InvalidEntityException("Subnet can't be zero or negative", invalidNode) - GridTestData.nodeA.copy().geoPosition(null).build() || new InvalidEntityException("GeoPosition of node is null", invalidNode) - GridTestData.nodeA.copy().vTarget(Quantities.getQuantity(0d, PU)).build() || new InvalidEntityException("Target voltage (p.u.) is not a positive value", invalidNode) - GridTestData.nodeA.copy().vTarget(Quantities.getQuantity(2.1d, PU)).build() || new UnsafeEntityException("Target voltage (p.u.) 
might be too high", invalidNode) + Quantities.getQuantity(380d, KILOVOLT), Quantities.getQuantity(560d, KILOVOLT)))).build() || 1 || new InvalidEntityException("Node has invalid voltage level", invalidNode) + GridTestData.nodeA.copy().subnet(0).build() || 1 || new InvalidEntityException("Subnet can't be zero or negative", invalidNode) + GridTestData.nodeA.copy().geoPosition(null).build() || 1 || new InvalidEntityException("GeoPosition of node is null", invalidNode) + GridTestData.nodeA.copy().vTarget(Quantities.getQuantity(0d, PU)).build() || 1 || new InvalidEntityException("Target voltage (p.u.) is not a positive value", invalidNode) + GridTestData.nodeA.copy().vTarget(Quantities.getQuantity(2.1d, PU)).build() || 1 || new UnsafeEntityException("Target voltage (p.u.) might be too high", invalidNode) } } diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/SystemParticipantValidationUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/validation/SystemParticipantValidationUtilsTest.groovy index 85e796d99..6f9cc47aa 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/SystemParticipantValidationUtilsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/SystemParticipantValidationUtilsTest.groovy @@ -10,6 +10,7 @@ import edu.ie3.datamodel.exceptions.NotImplementedException import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.system.characteristic.WecCharacteristicInput import edu.ie3.datamodel.models.input.system.type.* +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.SystemParticipantTestData import edu.ie3.util.quantities.interfaces.Currency import edu.ie3.util.quantities.interfaces.DimensionlessRate @@ -47,16 +48,17 @@ class SystemParticipantValidationUtilsTest extends Specification { def "SystemParticipantValidationUtils.check() recognizes all potential errors for a system participant"() { when: - SystemParticipantValidationUtils.check(invalidSystemParticipant) + List> 
exceptions = SystemParticipantValidationUtils.check(invalidSystemParticipant).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidSystemParticipant || expectedException - SystemParticipantTestData.bmInput.copy().qCharacteristics(null).build() || new InvalidEntityException("Reactive power characteristics of system participant is not defined", invalidSystemParticipant) + invalidSystemParticipant || expectedSize || expectedException + SystemParticipantTestData.bmInput.copy().qCharacteristics(null).build() || 1 || new InvalidEntityException("Reactive power characteristics of system participant is not defined", invalidSystemParticipant) } // Common data for all system participant types @@ -99,7 +101,8 @@ class SystemParticipantValidationUtilsTest extends Specification { SystemParticipantValidationUtils.check(invalidType) then: - Exception ex = thrown() + Throwable topEx = thrown() + Throwable ex = topEx.cause ex.class == expectedException.class ex.message == expectedException.message @@ -138,17 +141,18 @@ class SystemParticipantValidationUtilsTest extends Specification { def "SystemParticipantValidationUtils.checkBmType() recognizes all potential errors for a biomass power plant type"() { when: - SystemParticipantValidationUtils.check(invalidBmType) + ValidationUtils.check(invalidBmType) then: - Exception ex = thrown() + Throwable topEx = thrown() + Throwable ex = topEx.cause ex.class == expectedException.class ex.message == expectedException.message where: - invalidBmType || expectedException - new BmTypeInput(uuid, id, capex, opex, Quantities.getQuantity(-25, ACTIVE_POWER_GRADIENT), sRated, cosPhiRated, etaConv) || new InvalidEntityException("The following quantities have to be zero or positive: -25 %/h", invalidBmType) - new BmTypeInput(uuid, id, capex, opex, 
activePowerGradient, sRated, cosPhiRated, Quantities.getQuantity(1000d, PERCENT)) || new InvalidEntityException("Efficiency of inverter of BmTypeInput must be between 0% and 100%", invalidBmType) + invalidBmType || expectedException + new BmTypeInput(uuid, id, capex, opex, Quantities.getQuantity(-25, ACTIVE_POWER_GRADIENT), sRated, cosPhiRated, etaConv) || new InvalidEntityException("The following quantities have to be zero or positive: -25 %/h", invalidBmType) + new BmTypeInput(uuid, id, capex, opex, activePowerGradient, sRated, cosPhiRated, Quantities.getQuantity(1000d, PERCENT)) || new InvalidEntityException("Efficiency of inverter of BmTypeInput must be between 0% and 100%", invalidBmType) } // CHP @@ -182,7 +186,8 @@ class SystemParticipantValidationUtilsTest extends Specification { SystemParticipantValidationUtils.check(invalidChpType) then: - Exception ex = thrown() + Throwable topEx = thrown() + Throwable ex = topEx.cause ex.class == expectedException.class ex.message == expectedException.message @@ -225,7 +230,8 @@ class SystemParticipantValidationUtilsTest extends Specification { SystemParticipantValidationUtils.check(invalidEvType) then: - Exception ex = thrown() + Throwable topEx = thrown() + Throwable ex = topEx.cause ex.class == expectedException.class ex.message == expectedException.message @@ -249,17 +255,18 @@ class SystemParticipantValidationUtilsTest extends Specification { def "SystemParticipantValidationUtils.checkFixedFeedIn() recognizes all potential errors for an a Fixed Feed-In"() { when: - SystemParticipantValidationUtils.check(invalidFixedFeedIn) + List> exceptions = SystemParticipantValidationUtils.check(invalidFixedFeedIn).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidFixedFeedIn || expectedException - 
SystemParticipantTestData.fixedFeedInInput.copy().sRated(Quantities.getQuantity(-100d, ACTIVE_POWER_IN)).build() || new InvalidEntityException("The following quantities have to be zero or positive: -100 kVA", invalidFixedFeedIn) - SystemParticipantTestData.fixedFeedInInput.copy().cosPhiRated(-1d).build() || new InvalidEntityException("Rated power factor of FixedFeedInInput must be between 0 and 1", invalidFixedFeedIn) + invalidFixedFeedIn || expectedSize || expectedException + SystemParticipantTestData.fixedFeedInInput.copy().sRated(Quantities.getQuantity(-100d, ACTIVE_POWER_IN)).build() || 1 || new InvalidEntityException("The following quantities have to be zero or positive: -100 kVA", invalidFixedFeedIn) + SystemParticipantTestData.fixedFeedInInput.copy().cosPhiRated(-1d).build() || 1 || new InvalidEntityException("Rated power factor of FixedFeedInInput must be between 0 and 1", invalidFixedFeedIn) } // HP @@ -293,7 +300,8 @@ class SystemParticipantValidationUtilsTest extends Specification { SystemParticipantValidationUtils.check(invalidHpType) then: - Exception ex = thrown() + Throwable topEx = thrown() + Throwable ex = topEx.cause ex.class == expectedException.class ex.message == expectedException.message @@ -317,18 +325,19 @@ class SystemParticipantValidationUtilsTest extends Specification { def "SystemParticipantValidationUtils.checkLoad() recognizes all potential errors for a load"() { when: - SystemParticipantValidationUtils.check(invalidLoad) + List> exceptions = SystemParticipantValidationUtils.check(invalidLoad).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidLoad || expectedException - SystemParticipantTestData.loadInput.copy().loadprofile(null).build() || new InvalidEntityException("No standard load profile defined for load", invalidLoad) - 
SystemParticipantTestData.loadInput.copy().sRated(Quantities.getQuantity(-25d, ACTIVE_POWER_IN)).eConsAnnual(Quantities.getQuantity(-4000, ENERGY_IN)).build() || new InvalidEntityException("The following quantities have to be zero or positive: -25 kVA, -4000 kWh", invalidLoad) - SystemParticipantTestData.loadInput.copy().cosPhiRated(2).build() || new InvalidEntityException("Rated power factor of LoadInput must be between 0 and 1", invalidLoad) + invalidLoad || expectedSize || expectedException + SystemParticipantTestData.loadInput.copy().loadprofile(null).build() || 1 || new InvalidEntityException("No standard load profile defined for load", invalidLoad) + SystemParticipantTestData.loadInput.copy().sRated(Quantities.getQuantity(-25d, ACTIVE_POWER_IN)).eConsAnnual(Quantities.getQuantity(-4000, ENERGY_IN)).build() || 1 || new InvalidEntityException("The following quantities have to be zero or positive: -25 kVA, -4000 kWh", invalidLoad) + SystemParticipantTestData.loadInput.copy().cosPhiRated(2).build() || 1 || new InvalidEntityException("Rated power factor of LoadInput must be between 0 and 1", invalidLoad) } // PV @@ -346,21 +355,22 @@ class SystemParticipantValidationUtilsTest extends Specification { def "SystemParticipantValidationUtils.checkPV() recognizes all potential errors for a PV"() { when: - SystemParticipantValidationUtils.check(invalidPV) + List> exceptions = SystemParticipantValidationUtils.check(invalidPV).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidPV || expectedException - SystemParticipantTestData.pvInput.copy().sRated(Quantities.getQuantity(-25d, ACTIVE_POWER_IN)).build() || new InvalidEntityException("The following quantities have to be zero or positive: -25 kVA", invalidPV) - SystemParticipantTestData.pvInput.copy().albedo(2).build() || 
new InvalidEntityException("Albedo of the plant's surrounding of PvInput must be between 0 and 1", invalidPV) - SystemParticipantTestData.pvInput.copy().azimuth(Quantities.getQuantity(-100d, AZIMUTH)).build() || new InvalidEntityException("Azimuth angle of PvInput must be between -90° (east) and 90° (west)", invalidPV) - SystemParticipantTestData.pvInput.copy().etaConv(Quantities.getQuantity(110d, EFFICIENCY)).build() || new InvalidEntityException("Efficiency of the converter of PvInput must be between 0% and 100%", invalidPV) - SystemParticipantTestData.pvInput.copy().elevationAngle(Quantities.getQuantity(100d, SOLAR_ELEVATION_ANGLE)).build() || new InvalidEntityException("Tilted inclination from horizontal of PvInput must be between 0° and 90°", invalidPV) - SystemParticipantTestData.pvInput.copy().cosPhiRated(2).build() || new InvalidEntityException("Rated power factor of PvInput must be between 0 and 1", invalidPV) + invalidPV || expectedSize || expectedException + SystemParticipantTestData.pvInput.copy().sRated(Quantities.getQuantity(-25d, ACTIVE_POWER_IN)).build() || 1 || new InvalidEntityException("The following quantities have to be zero or positive: -25 kVA", invalidPV) + SystemParticipantTestData.pvInput.copy().albedo(2).build() || 1 || new InvalidEntityException("Albedo of the plant's surrounding of PvInput must be between 0 and 1", invalidPV) + SystemParticipantTestData.pvInput.copy().azimuth(Quantities.getQuantity(-100d, AZIMUTH)).build() || 1 || new InvalidEntityException("Azimuth angle of PvInput must be between -90° (east) and 90° (west)", invalidPV) + SystemParticipantTestData.pvInput.copy().etaConv(Quantities.getQuantity(110d, EFFICIENCY)).build() || 1 || new InvalidEntityException("Efficiency of the converter of PvInput must be between 0% and 100%", invalidPV) + SystemParticipantTestData.pvInput.copy().elevationAngle(Quantities.getQuantity(100d, SOLAR_ELEVATION_ANGLE)).build() || 1 || new InvalidEntityException("Tilted inclination from horizontal 
of PvInput must be between 0° and 90°", invalidPV) + SystemParticipantTestData.pvInput.copy().cosPhiRated(2).build() || 1 || new InvalidEntityException("Rated power factor of PvInput must be between 0 and 1", invalidPV) } // Storage @@ -394,7 +404,8 @@ class SystemParticipantValidationUtilsTest extends Specification { SystemParticipantValidationUtils.check(invalidStorageType) then: - Exception ex = thrown() + Throwable topEx = thrown() + Throwable ex = topEx.cause ex.class == expectedException.class ex.message == expectedException.message @@ -438,7 +449,8 @@ class SystemParticipantValidationUtilsTest extends Specification { SystemParticipantValidationUtils.check(invalidWecType) then: - Exception ex = thrown() + Throwable topEx = thrown() + Throwable ex = topEx.cause ex.class == expectedException.class ex.message == expectedException.message @@ -455,10 +467,10 @@ class SystemParticipantValidationUtilsTest extends Specification { def invalidParticipant = new InvalidSystemParticipantInput(node) when: - SystemParticipantValidationUtils.check(invalidParticipant) + List> exceptions = SystemParticipantValidationUtils.check(invalidParticipant).stream().filter { it -> it.failure }.toList() then: - def e = thrown(NotImplementedException) + def e = exceptions.get(0).exception.get().cause e.message == "Cannot validate object of class 'InvalidSystemParticipantInput', as no routine is implemented." } @@ -470,8 +482,9 @@ class SystemParticipantValidationUtilsTest extends Specification { SystemParticipantValidationUtils.check(invalidParticipantInput) then: - def e = thrown(NotImplementedException) - e.message == "Cannot validate object of class 'InvalidSystemParticipantTypeInput', as no routine is implemented." + Throwable topEx = thrown() + Throwable e = topEx.cause + e.message.contains "Cannot validate object of class 'InvalidSystemParticipantTypeInput', as no routine is implemented." 
} def "Checking electric vehicle charging stations leads to an exception"() { diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/ThermalUnitValidationUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/validation/ThermalUnitValidationUtilsTest.groovy index ca25fa8fe..5b991a565 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/ThermalUnitValidationUtilsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/ThermalUnitValidationUtilsTest.groovy @@ -6,11 +6,13 @@ package edu.ie3.datamodel.utils.validation import edu.ie3.datamodel.exceptions.InvalidEntityException +import edu.ie3.datamodel.exceptions.ValidationException import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput import edu.ie3.datamodel.models.input.thermal.ThermalHouseInput +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.SystemParticipantTestData import edu.ie3.test.common.ThermalUnitInputTestData import edu.ie3.util.TimeUtil @@ -64,20 +66,21 @@ class ThermalUnitValidationUtilsTest extends Specification { def "ThermalUnitValidationUtils.checkThermalHouse() recognizes all potential errors for a thermal house"() { when: - ThermalUnitValidationUtils.check(invalidThermalHouse) + List> exceptions = ThermalUnitValidationUtils.check(invalidThermalHouse).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidThermalHouse || expectedException - new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, Quantities.getQuantity(-10, StandardUnits.THERMAL_TRANSMISSION), ethCapa, TARGET_TEMPERATURE, UPPER_TEMPERATURE_LIMIT, LOWER_TEMPERATURE_LIMIT) || new 
InvalidEntityException("The following quantities have to be zero or positive: -10 kW/K", invalidThermalHouse) - new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, thermalConductance, Quantities.getQuantity(0, StandardUnits.HEAT_CAPACITY), TARGET_TEMPERATURE, UPPER_TEMPERATURE_LIMIT, LOWER_TEMPERATURE_LIMIT) || new InvalidEntityException("The following quantities have to be positive: 0 kWh/K", invalidThermalHouse) - new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, thermalConductance, ethCapa, Quantities.getQuantity(0, StandardUnits.TEMPERATURE), UPPER_TEMPERATURE_LIMIT, LOWER_TEMPERATURE_LIMIT) || new InvalidEntityException("Target temperature must be higher than lower temperature limit and lower than upper temperature limit", invalidThermalHouse) - new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, thermalConductance, ethCapa, TARGET_TEMPERATURE, Quantities.getQuantity(0, StandardUnits.TEMPERATURE), LOWER_TEMPERATURE_LIMIT) || new InvalidEntityException("Target temperature must be higher than lower temperature limit and lower than upper temperature limit", invalidThermalHouse) - new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, thermalConductance, ethCapa, TARGET_TEMPERATURE, UPPER_TEMPERATURE_LIMIT, Quantities.getQuantity(30, StandardUnits.TEMPERATURE)) || new InvalidEntityException("Target temperature must be higher than lower temperature limit and lower than upper temperature limit", invalidThermalHouse) + invalidThermalHouse || expectedSize || expectedException + new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, Quantities.getQuantity(-10, StandardUnits.THERMAL_TRANSMISSION), ethCapa, TARGET_TEMPERATURE, UPPER_TEMPERATURE_LIMIT, LOWER_TEMPERATURE_LIMIT) || 1 || new 
InvalidEntityException("The following quantities have to be zero or positive: -10 kW/K", invalidThermalHouse) + new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, thermalConductance, Quantities.getQuantity(0, StandardUnits.HEAT_CAPACITY), TARGET_TEMPERATURE, UPPER_TEMPERATURE_LIMIT, LOWER_TEMPERATURE_LIMIT) || 1 || new InvalidEntityException("The following quantities have to be positive: 0 kWh/K", invalidThermalHouse) + new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, thermalConductance, ethCapa, Quantities.getQuantity(0, StandardUnits.TEMPERATURE), UPPER_TEMPERATURE_LIMIT, LOWER_TEMPERATURE_LIMIT) || 1 || new InvalidEntityException("Target temperature must be higher than lower temperature limit and lower than upper temperature limit", invalidThermalHouse) + new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, thermalConductance, ethCapa, TARGET_TEMPERATURE, Quantities.getQuantity(0, StandardUnits.TEMPERATURE), LOWER_TEMPERATURE_LIMIT) || 1 || new InvalidEntityException("Target temperature must be higher than lower temperature limit and lower than upper temperature limit", invalidThermalHouse) + new ThermalHouseInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, thermalConductance, ethCapa, TARGET_TEMPERATURE, UPPER_TEMPERATURE_LIMIT, Quantities.getQuantity(30, StandardUnits.TEMPERATURE)) || 1 || new InvalidEntityException("Target temperature must be higher than lower temperature limit and lower than upper temperature limit", invalidThermalHouse) } // Thermal Cylindrical Storage @@ -95,18 +98,18 @@ class ThermalUnitValidationUtilsTest extends Specification { def "ThermalUnitValidationUtils.checkCylindricalStorage() recognizes all potential errors for a thermal cylindrical storage"() { when: - ThermalUnitValidationUtils.check(invalidCylindricalStorage) + List> 
exceptions = ThermalUnitValidationUtils.check(invalidCylindricalStorage).stream().filter { it -> it.failure }.toList() then: - Exception ex = thrown() + exceptions.size() == expectedSize + Exception ex = exceptions.get(0).exception.get() ex.class == expectedException.class ex.message == expectedException.message where: - invalidCylindricalStorage || expectedException - new CylindricalStorageInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, storageVolumeLvl, storageVolumeLvlMin, Quantities.getQuantity(100, StandardUnits.TEMPERATURE), Quantities.getQuantity(200, StandardUnits.TEMPERATURE), c) || new InvalidEntityException("Inlet temperature of the cylindrical storage cannot be lower than outlet temperature", invalidCylindricalStorage) - new CylindricalStorageInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, Quantities.getQuantity(100, StandardUnits.VOLUME), Quantities.getQuantity(200, StandardUnits.VOLUME), inletTemp, returnTemp, c) || new InvalidEntityException("Minimum permissible storage volume of the cylindrical storage cannot be higher than overall available storage volume", invalidCylindricalStorage) - new CylindricalStorageInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, Quantities.getQuantity(-100, StandardUnits.VOLUME), Quantities.getQuantity(-200, StandardUnits.VOLUME), inletTemp, returnTemp, Quantities.getQuantity(-1.05, StandardUnits.SPECIFIC_HEAT_CAPACITY)) || new InvalidEntityException("The following quantities have to be positive: -100 ㎥, -200 ㎥, -1.05 kWh/K*m³", invalidCylindricalStorage) + invalidCylindricalStorage || expectedSize || expectedException + new CylindricalStorageInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, storageVolumeLvl, storageVolumeLvlMin, Quantities.getQuantity(100, StandardUnits.TEMPERATURE), Quantities.getQuantity(200, StandardUnits.TEMPERATURE), c) || 1 || new 
InvalidEntityException("Inlet temperature of the cylindrical storage cannot be lower than outlet temperature", invalidCylindricalStorage) + new CylindricalStorageInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, Quantities.getQuantity(100, StandardUnits.VOLUME), Quantities.getQuantity(200, StandardUnits.VOLUME), inletTemp, returnTemp, c) || 1 || new InvalidEntityException("Minimum permissible storage volume of the cylindrical storage cannot be higher than overall available storage volume", invalidCylindricalStorage) + new CylindricalStorageInput(thermalUnitUuid, id, operator, operationTime, SystemParticipantTestData.thermalBus, Quantities.getQuantity(-100, StandardUnits.VOLUME), Quantities.getQuantity(-200, StandardUnits.VOLUME), inletTemp, returnTemp, Quantities.getQuantity(-1.05, StandardUnits.SPECIFIC_HEAT_CAPACITY)) || 1 || new InvalidEntityException("The following quantities have to be positive: -100 ㎥, -200 ㎥, -1.05 kWh/K*m³", invalidCylindricalStorage) } - } diff --git a/src/test/groovy/edu/ie3/datamodel/utils/validation/ValidationUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/validation/ValidationUtilsTest.groovy index 8c9a4c59b..26c43982d 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/validation/ValidationUtilsTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/validation/ValidationUtilsTest.groovy @@ -5,21 +5,23 @@ */ package edu.ie3.datamodel.utils.validation -import static edu.ie3.datamodel.models.StandardUnits.CONDUCTANCE_PER_LENGTH -import static edu.ie3.datamodel.models.StandardUnits.ELECTRIC_CURRENT_MAGNITUDE -import static edu.ie3.datamodel.models.StandardUnits.RATED_VOLTAGE_MAGNITUDE -import static edu.ie3.datamodel.models.StandardUnits.SUSCEPTANCE_PER_LENGTH +import static edu.ie3.datamodel.models.StandardUnits.* +import static edu.ie3.datamodel.utils.validation.DummyAssetInput.invalid +import static edu.ie3.datamodel.utils.validation.DummyAssetInput.valid import static 
edu.ie3.util.quantities.PowerSystemUnits.OHM_PER_KILOMETRE import static edu.ie3.util.quantities.PowerSystemUnits.PU -import edu.ie3.datamodel.exceptions.NotImplementedException - +import edu.ie3.datamodel.exceptions.FailedValidationException import edu.ie3.datamodel.exceptions.InvalidEntityException +import edu.ie3.datamodel.exceptions.UnsafeEntityException +import edu.ie3.datamodel.exceptions.ValidationException import edu.ie3.datamodel.models.OperationTime +import edu.ie3.datamodel.models.input.AssetInput import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.connector.type.LineTypeInput import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils +import edu.ie3.datamodel.utils.Try import edu.ie3.test.common.GridTestData import edu.ie3.util.TimeUtil import edu.ie3.util.quantities.interfaces.SpecificConductance @@ -110,12 +112,11 @@ class ValidationUtilsTest extends Specification { then: Exception ex = thrown() - ex.class == expectedException.class - ex.message == expectedException.message + ex.message.contains(expectedException.message) where: invalidObject || expectedException - new Coordinate(10, 10) || new NotImplementedException("Cannot validate object of class '" + invalidObject.getClass().getSimpleName() + "', as no routine is implemented.") + new Coordinate(10, 10) || new FailedValidationException("Cannot validate object of class '" + invalidObject.class.simpleName + "', as no routine is implemented.") } def "The validation check method recognizes all potential errors for an asset"() { @@ -124,8 +125,7 @@ class ValidationUtilsTest extends Specification { then: Exception ex = thrown() - ex.class == expectedException.class - ex.message == expectedException.message + ex.message.contains(expectedException.message) where: invalidAsset || expectedException @@ -171,7 +171,7 @@ class ValidationUtilsTest extends Specification { then: InvalidEntityException ex = thrown() - 
ex.message == "Entity is invalid because of: The following quantities have to be zero or positive: -1 µS/km [LineTypeInput{uuid=3bed3eb3-9790-4874-89b5-a5434d408088, id=lineType_AtoB, b=-1 µS/km, g=0.0 µS/km, r=0.437 Ω/km, x=0.356 Ω/km, iMax=300 A, vRated=20 kV}]" + ex.message == "Entity is invalid because of: \nThe following quantities have to be zero or positive: -1 µS/km [LineTypeInput{uuid=3bed3eb3-9790-4874-89b5-a5434d408088, id=lineType_AtoB, b=-1 µS/km, g=0.0 µS/km, r=0.437 Ω/km, x=0.356 Ω/km, iMax=300 A, vRated=20 kV}]" } def "The check for zero or negative entities should work as expected"() { @@ -208,19 +208,20 @@ class ValidationUtilsTest extends Specification { then: InvalidEntityException ex = thrown() - ex.message == "Entity is invalid because of: The following quantities have to be positive: 0.0 µS/km [LineTypeInput{uuid=3bed3eb3-9790-4874-89b5-a5434d408088, id=lineType_AtoB, b=0.0 µS/km, g=0.0 µS/km, r=0.437 Ω/km, x=0.356 Ω/km, iMax=300 A, vRated=20 kV}]" + ex.message == "Entity is invalid because of: \nThe following quantities have to be positive: 0.0 µS/km [LineTypeInput{uuid=3bed3eb3-9790-4874-89b5-a5434d408088, id=lineType_AtoB, b=0.0 µS/km, g=0.0 µS/km, r=0.437 Ω/km, x=0.356 Ω/km, iMax=300 A, vRated=20 kV}]" } def "Checking an unsupported asset leads to an exception"() { given: - def invalidAsset = new InvalidAssetInput() + def invalidAsset = invalid() when: - ValidationUtils.checkAsset(invalidAsset) + List> exceptions = ValidationUtils.checkAsset(invalidAsset).stream().filter { it -> it.failure }.toList() then: - def e = thrown(NotImplementedException) - e.message == "Cannot validate object of class 'InvalidAssetInput', as no routine is implemented." 
+ exceptions.size() == 1 + def e = exceptions.get(0).exception.get() + e.message.contains("Cannot validate object of class 'DummyAssetInput', as no routine is implemented.") } def "Checking an unsupported asset type leads to an exception"() { @@ -228,11 +229,12 @@ class ValidationUtilsTest extends Specification { def invalidAssetType = new InvalidAssetTypeInput() when: - ValidationUtils.checkAssetType(invalidAssetType) + List> exceptions = ValidationUtils.checkAssetType(invalidAssetType).stream().filter { it -> it.failure }.toList() then: - def e = thrown(NotImplementedException) - e.message == "Cannot validate object of class 'InvalidAssetTypeInput', as no routine is implemented." + exceptions.size() == 1 + def e = exceptions.get(0).exception.get() + e.message.contains("Cannot validate object of class 'InvalidAssetTypeInput', as no routine is implemented.") } def "Checking an asset type input without an id leads to an exception"() { @@ -240,10 +242,42 @@ class ValidationUtilsTest extends Specification { def invalidAssetType = new InvalidAssetTypeInput(UUID.randomUUID(), null) when: - ValidationUtils.checkAssetType(invalidAssetType) + List> exceptions = ValidationUtils.checkAssetType(invalidAssetType).stream().filter { it -> it.failure }.toList() + + then: + exceptions.size() == 2 + def e = exceptions.get(0).exception.get() + e.message.startsWith("Entity is invalid because of: \nNo ID assigned [AssetTypeInput") + } + + def "Checking if asset input ids are unique"() { + given: + Set validAssetIds = [ + valid("first"), + valid("second"), + valid("third") + ] + + when: + List> exceptions = ValidationUtils.checkIds(validAssetIds) + + then: + exceptions.every { ex -> ex.success } + } + + def "Duplicate asset input ids leads to an exception"() { + given: + Set invalidAssetIds = [ + invalid(), + invalid() + ] + + when: + List> exceptions = ValidationUtils.checkIds(invalidAssetIds) then: - def e = thrown(InvalidEntityException) - e.message.startsWith("Entity is invalid 
because of: No ID assigned [AssetTypeInput") + exceptions.size() == 1 + exceptions.get(0).failure + exceptions.get(0).exception.get().message.contains("Entity may be unsafe because of: There is already an entity with the id invalid_asset") } } diff --git a/src/test/groovy/edu/ie3/test/common/SampleJointGrid.groovy b/src/test/groovy/edu/ie3/test/common/SampleJointGrid.groovy index b9ed4be3f..a9f1aeb63 100644 --- a/src/test/groovy/edu/ie3/test/common/SampleJointGrid.groovy +++ b/src/test/groovy/edu/ie3/test/common/SampleJointGrid.groovy @@ -409,5 +409,4 @@ class SampleJointGrid extends SystemParticipantTestData { transformerType_MV_HV_110KV, 0, false) - } diff --git a/src/test/groovy/edu/ie3/test/common/SystemParticipantTestData.groovy b/src/test/groovy/edu/ie3/test/common/SystemParticipantTestData.groovy index ad2873211..bfafb5d6b 100644 --- a/src/test/groovy/edu/ie3/test/common/SystemParticipantTestData.groovy +++ b/src/test/groovy/edu/ie3/test/common/SystemParticipantTestData.groovy @@ -313,7 +313,9 @@ class SystemParticipantTestData { ) // Energy Management - public static final UUID[] connectedAssets = new UUID[]{loadInput.getUuid(), pvInput.getUuid()} + public static final UUID[] connectedAssets = new UUID[]{ + loadInput.getUuid(), pvInput.getUuid() + } public static final String emControlStrategy = "self_optimization" public static final emInput = new EmInput( UUID.fromString("977157f4-25e5-4c72-bf34-440edc778792"), @@ -353,5 +355,4 @@ class SystemParticipantTestData { [] as Set, [] as Set) } - } diff --git a/src/test/groovy/edu/ie3/test/common/ThermalUnitInputTestData.groovy b/src/test/groovy/edu/ie3/test/common/ThermalUnitInputTestData.groovy index 8df22686c..c5b7f61a8 100644 --- a/src/test/groovy/edu/ie3/test/common/ThermalUnitInputTestData.groovy +++ b/src/test/groovy/edu/ie3/test/common/ThermalUnitInputTestData.groovy @@ -66,5 +66,4 @@ class ThermalUnitInputTestData extends SystemParticipantTestData { inletTemp, returnTemp, c) - } diff --git 
a/src/test/groovy/edu/ie3/test/common/WeatherTestData.groovy b/src/test/groovy/edu/ie3/test/common/WeatherTestData.groovy index 5c74df196..950705e57 100644 --- a/src/test/groovy/edu/ie3/test/common/WeatherTestData.groovy +++ b/src/test/groovy/edu/ie3/test/common/WeatherTestData.groovy @@ -6,16 +6,20 @@ package edu.ie3.test.common import edu.ie3.datamodel.io.source.IdCoordinateSource +import edu.ie3.datamodel.io.source.csv.CsvTestDataMeta +import edu.ie3.util.geo.CoordinateDistance import edu.ie3.util.geo.GeoUtils import org.locationtech.jts.geom.Point +import tech.units.indriya.ComparableQuantity +import javax.measure.quantity.Length import java.util.stream.Collectors import java.util.stream.Stream abstract class WeatherTestData { - protected static final class DummyIdCoordinateSource implements IdCoordinateSource { - @Override + static final class DummyIdCoordinateSource implements CsvTestDataMeta, IdCoordinateSource { + Optional getCoordinate(int id) { switch (id) { case 193186: return Optional.of(GeoUtils.buildPoint(7d, 49d)) @@ -60,6 +64,16 @@ abstract class WeatherTestData { GeoUtils.buildPoint(7d, 51d) ] } + + @Override + List getNearestCoordinates(Point coordinate, int n) { + throw new UnsupportedOperationException("This method is not supported!") + } + + @Override + List getClosestCoordinates(Point coordinate, int n, ComparableQuantity distance) { + throw new UnsupportedOperationException("This method is not supported!") + } } public static final IdCoordinateSource coordinateSource = new DummyIdCoordinateSource() diff --git a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/line_input.csv b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/line_input.csv index c5f849c5c..e067b14d5 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/line_input.csv +++ b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/line_input.csv @@ -1,3 +1,3 @@ 
uuid,geo_position,id,length,node_a,node_b,olm_characteristic,operates_from,operates_until,operator,parallel_devices,type 92ec3bcf-1777-4d38-af67-0bf7c9fa73c7,"{""type"":""LineString"",""coordinates"":[[7.411111,51.492528],[7.414116,51.484136]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",test_line_AtoB,0.003,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 -91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,"{""type"":""LineString"",""coordinates"":[[7.411111,51.492528],[7.414116,51.484136]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",test_line_CtoD,0.003,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,6e0980e0-10f2-4e18-862b-eb2b7c90509b,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 \ No newline at end of file +91ec3bcf-1777-4d38-af67-0bf7c9fa73c,"{""type"":""LineString"",""coordinates"":[[7.411111,51.492528],[7.414116,51.484136]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",test_line_CtoD,0.003,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,6e0980e0-10f2-4e18-862b-eb2b7c90509b,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 \ No newline at end of file diff --git a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/measurement_unit_input.csv b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/measurement_unit_input.csv index d9e432af9..d602fcd2c 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/measurement_unit_input.csv +++ b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/measurement_unit_input.csv @@ -1,2 +1,2 @@ uuid,v_ang,v_mag,id,node,operates_from,operates_until,operator,p,q 
-ce6119e3-f725-4166-b6e0-59f62e0c293d,true,true,test_measurementUnit,aaa74c1a-d07e-4615-99a5-e991f1d81cc4,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,true +c e6119e3-f725-4166-b6e0-59f62e0c293d,true,true,test_measurementUnit,aaa74c1a-d07e-4615-99a5-e991f1d81cc4,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,true diff --git a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/node_input.csv b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/node_input.csv index 6a9f52e29..743894f9a 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/node_input.csv +++ b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/node_input.csv @@ -1,7 +1,7 @@ uuid,geo_position,id,operates_from,operates_until,operator,slack,subnet,v_rated,v_target,volt_lvl 4ca90220-74c2-4369-9afa-a18bf068840d,"{""type"":""Point"",""coordinates"":[7.411111,51.492528],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",node_a,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,1,380.0,1.0,Höchstspannung 47d29df0-ba2d-4d23-8e75-c82229c5c758,,node_b,,,,false,2,110.0,1.0,Hochspannung -bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,,node_c,,,,false,3,20.0,1.0,Mittelspannung +bd837a25-58f3-44ac-aa90-c6b6e3 cd91b2,,node_c,,,,false,3,20.0,1.0,Mittelspannung 98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,node_e,,,,false,5,10.0,1.0,Mittelspannung 9e37ce48-9650-44ec-b888-c2fd182aff01,,node_f,,,,false,6,0.4,1.0,Niederspannung aaa74c1a-d07e-4615-99a5-e991f1d81cc4,,node_g,,,,false,6,0.4,1.0,Niederspannung diff --git a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/switch_input.csv b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/switch_input.csv index af37806f6..5e9754c1c 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/switch_input.csv +++ 
b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/switch_input.csv @@ -1,2 +1,2 @@ uuid,closed,id,node_a,node_b,operates_from,operates_until,operator -5dc88077-aeb6-4711-9142-db57287640b1,true,test_switch_AtoB,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92 +5dc88077-aeb6-4711-9142-db57287640b1,true,test_switch_AtoB,4ca90220-74c2-4369-9afa -a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92 diff --git a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/transformer_2_w_input.csv b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/transformer_2_w_input.csv index 564775304..d1a429620 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/transformer_2_w_input.csv +++ b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/transformer_2_w_input.csv @@ -1,6 +1,6 @@ uuid,auto_tap,id,node_a,node_b,operates_from,operates_until,operator,parallel_devices,tap_pos,type 58247de7-e297-4d9b-a5e4-b662c058c655,true,2w_single_test,47d29df0-ba2d-4d23-8e75-c82229c5c758,6e0980e0-10f2-4e18-862b-eb2b7c90509b,,,,1,0,202069a7-bcf8-422c-837c-273575220c8a -8542bfa5-dc34-4367-b549-e9f515e6cced,true,2w_v_1,47d29df0-ba2d-4d23-8e75-c82229c5c758,98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,,,1,0,ac30443b-29e7-4635-b399-1062cfb3ffda +8542bfa5-dc34-4367-b549-e9f515e6cced,true,2w_v_1,47d29df0-ba2d-4d23-8e75-c 82229c5c758,98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,,,1,0,ac30443b-29e7-4635-b399-1062cfb3ffda 0c03391d-47e1-49b3-9c9c-1616258e78a7,true,2w_v_2,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,,,1,0,8441dd78-c528-4e63-830d-52d341131432 
26a3583e-8e62-40b7-ba4c-092f6fd5a70d,true,2w_parallel_1,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,9e37ce48-9650-44ec-b888-c2fd182aff01,,,,1,0,08559390-d7c0-4427-a2dc-97ba312ae0ac 5dc88077-aeb6-4711-9142-db57292640b1,true,2w_parallel_2,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,aaa74c1a-d07e-4615-99a5-e991f1d81cc4,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,1,0,08559390-d7c0-4427-a2dc-97ba312ae0ac diff --git a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/transformer_3_w_input.csv b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/transformer_3_w_input.csv index b970ceefb..8a8d3e8a4 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/transformer_3_w_input.csv +++ b/src/test/resources/edu/ie3/datamodel/io/source/csv/_grid/malformed/transformer_3_w_input.csv @@ -1,2 +1,2 @@ uuid,auto_tap,id,node_a,node_b,node_c,operates_from,operates_until,operator,parallel_devices,tap_pos,type -cc327469-7d56-472b-a0df-edbb64f90e8f,true,3w_test,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,1,0,5b0ee546-21fb-4a7f-a801-5dbd3d7bb356 +cc327469-7d56-472b-a0df-edbb6 4f90e8f,true,3w_test,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,1,0,5b0ee546-21fb-4a7f-a801-5dbd3d7bb356 diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_coordinates/coordinates.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_coordinates/coordinates.sql new file mode 100644 index 000000000..cc9e69a1f --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_coordinates/coordinates.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.coordinates +( + id integer, + coordinate geography(POINT) +) + 
WITH ( + OIDS = FALSE + ) + TABLESPACE pg_default; + +CREATE INDEX idx ON public.coordinates USING gist (coordinate); + +INSERT INTO + public.coordinates(id, coordinate) +VALUES +(67775, ST_POINT(7.438, 51.5)), +(531137, ST_POINT(7.375, 51.5)), +(551525, ST_POINT(7.438, 51.438)), +(278150, ST_POINT(7.375, 51.438)) \ No newline at end of file diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_types/types.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_types/types.sql new file mode 100644 index 000000000..13bb195bf --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_types/types.sql @@ -0,0 +1,42 @@ +CREATE TABLE public.line_type_input +( + uuid uuid NOT NULL, + id varchar NOT NULL, + v_rated double precision NOT NULL, + i_max double precision NOT NULL, + r double precision NOT NULL, + x double precision NOT NULL, + b double precision NOT NULL, + g double precision NOT NULL, + PRIMARY KEY (uuid) +); + +CREATE TABLE public.transformer_2_w_type_input +( + uuid uuid NOT NULL, + id varchar NOT NULL, + s_rated double precision NOT NULL, + r_sc double precision NOT NULL, + x_sc double precision NOT NULL, + b_m double precision NOT NULL, + g_m double precision NOT NULL, + d_phi double precision NOT NULL, + d_v double precision NOT NULL, + tap_max int NOT NULL, + tap_min int NOT NULL, + tap_neutr int NOT NULL, + tap_side bool NOT NULL, + v_rated_a double precision NOT NULL, + v_rated_b double precision NOT NULL, + PRIMARY KEY (uuid) +); + +INSERT INTO + public.line_type_input (uuid, id, v_rated, i_max, r, x, b, g) +VALUES + ('3bed3eb3-9790-4874-89b5-a5434d408088', 'lineType_AtoB', 0.00322, 0.0, 0.437, 0.437, 300.0, 20.0); + +INSERT INTO + public.transformer_2_w_type_input (uuid,b_m,d_phi,d_v,g_m,id,r_sc,s_rated,tap_max,tap_min,tap_neutr,tap_side,v_rated_a,v_rated_b,x_sc) +VALUES + ('202069a7-bcf8-422c-837c-273575220c8a',0.0,0.0,1.5,0.0,'HS-MS_1',45.375,20000.0,10,-10,0,false,110.0,20.0,102.759); \ No newline at end of file diff 
--git a/version.properties b/version.properties index dcf9d3086..06287583e 100644 --- a/version.properties +++ b/version.properties @@ -1,8 +1,8 @@ #Generated by the Semver Plugin for Gradle -#Wed Jan 05 11:45:20 CET 2022 +#Thu Feb 16 11:19:35 CET 2023 version.buildmeta= -version.major=3 +version.major=4 version.minor=0 version.patch=0 version.prerelease= -version.semver=3.0.0 +version.semver=4.0.0