diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index bb257e95e..000000000 --- a/.travis.yml +++ /dev/null @@ -1,11 +0,0 @@ -dist: trusty -language: java -jdk: - - oraclejdk8 -script: - - chmod -R ug+x .travis - - .travis/build.sh -notifications: - webhooks: https://simona.ie3.e-technik.tu-dortmund.de/chat/hooks/RtG988s8R4iY3vM32/6JNKKYCwq9DYbqWgkFBmmdzRiAHvXAgLQeNyWNKnfWyjvHR3 -after_success: - - bash <(curl -s https://codecov.io/bash) diff --git a/docs/uml/main/DataSourceClassDiagramm.puml b/docs/uml/main/DataSourceClassDiagramm.puml index 69da7c144..e82536167 100644 --- a/docs/uml/main/DataSourceClassDiagramm.puml +++ b/docs/uml/main/DataSourceClassDiagramm.puml @@ -51,29 +51,31 @@ JDBCGridDataSource --> JDBCDataConnector class CSVGridDataSource { } -CSVGridDataSource --|> GridDataSource -CSVGridDataSource --> CSVDataConnector +CSVGridDataSource ..|> GridDataSource +CSVGridDataSource ..|> CSVDataConnector interface AssetDataSource { -{abstract}Future fetchAssetData() -{abstract}Future> fetchEvs() -{abstract}Future> fetchWecs() -{abstract}Future> fetch...() +{abstract}Set fetchEvs() +{abstract}Set fetchWecPlants() +{abstract}Set<...> fetch...() } -AssetDataSource --|> DataSource +AssetDataSource ..|> DataSource + +interface ThermalSource +ThermalSource ..|> DataSource interface TypeDataSource { {abstract}Future fetchTypeData() {abstract}Future> fetchEvTypes() {abstract}Future> fetch...() } -TypeDataSource --|> DataSource +TypeDataSource ..|> DataSource interface GraphicDataSource { {abstract}Future fetchGraphicData() {abstract}Future> fetch...() } -GraphicDataSource --|> DataSource +GraphicDataSource ..|> DataSource interface WeatherDataSource { {abstract}Future fetchWeatherData() diff --git a/docs/uml/main/InputDataDeployment.puml b/docs/uml/main/InputDataDeployment.puml index 7f5735f7c..8ed2d47f9 100644 --- a/docs/uml/main/InputDataDeployment.puml +++ b/docs/uml/main/InputDataDeployment.puml @@ -47,6 +47,7 @@ weather interface grid_source interface assets_source interface types_source +interface thermal_source interface graphics_source interface weather_source interface time_series_source @@ -64,6 +65,9 @@ assets_source --> assets types_source --> psql types_source --> types +thermal_source --> psql +thermal_source --> types + graphics_source --> psql graphics_source --> graphics @@ -81,6 +85,7 @@ inputAccumulator --> types_source inputAccumulator --> graphics_source inputAccumulator --> weather_source inputAccumulator --> time_series_source +inputAccumulator --> thermal_source projName_model.conf --> config_source diff --git a/docs/uml/main/InputDatamodelConcept.puml b/docs/uml/main/InputDatamodelConcept.puml index 929139a86..50e0f03ff 100644 --- a/docs/uml/main/InputDatamodelConcept.puml +++ b/docs/uml/main/InputDatamodelConcept.puml @@ -35,6 +35,7 @@ OperatorInput --|> InputEntity abstract Class ConnectorInput { + nodeA: NodeInput + nodeB: NodeInput + + parallelDevices: Integer } ConnectorInput --|> AssetInput ConnectorInput ..|> HasNodes diff --git a/src/main/java/edu/ie3/datamodel/exceptions/SinkException.java b/src/main/java/edu/ie3/datamodel/exceptions/SinkException.java index 03e543af7..398dcf736 100644 --- a/src/main/java/edu/ie3/datamodel/exceptions/SinkException.java +++ b/src/main/java/edu/ie3/datamodel/exceptions/SinkException.java @@ -12,7 +12,7 @@ * @version 0.1 * @since 19.03.20 */ -public class SinkException extends RuntimeException { +public class SinkException extends Exception { public SinkException(final String message, final Throwable cause) { 
super(message, cause); } diff --git a/src/main/java/edu/ie3/datamodel/exceptions/SourceException.java b/src/main/java/edu/ie3/datamodel/exceptions/SourceException.java new file mode 100644 index 000000000..0384a51e6 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/exceptions/SourceException.java @@ -0,0 +1,27 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.exceptions; + +/** + * Exception that should be used whenever an error occurs in an instance of a {@link + * edu.ie3.datamodel.io.source.DataSource} + * + * @version 0.1 + * @since 19.03.20 + */ +public class SourceException extends Exception { + public SourceException(final String message, final Throwable cause) { + super(message, cause); + } + + public SourceException(final Throwable cause) { + super(cause); + } + + public SourceException(final String message) { + super(message); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java index b716ac33e..008711b14 100644 --- a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java +++ b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java @@ -13,8 +13,10 @@ import edu.ie3.datamodel.models.timeseries.TimeSeries; import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; import edu.ie3.datamodel.models.value.Value; +import java.io.*; import java.io.File; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.*; import java.util.stream.Stream; import org.apache.logging.log4j.LogManager; @@ -36,6 +38,8 @@ public class CsvFileConnector implements DataConnector { private final FileNamingStrategy fileNamingStrategy; private final String baseFolderName; + private static final String FILE_ENDING = ".csv"; + public CsvFileConnector(String baseFolderName, FileNamingStrategy fileNamingStrategy) { this.baseFolderName = baseFolderName; this.fileNamingStrategy = fileNamingStrategy; @@ -84,21 +88,68 @@ BufferedCsvWriter getOrInitWriter(T timeSeries, String[] headerElements, String /** * Initializes a writer with the given base folder and file definition * - * @param baseFolderName Base folder, where the file hierarchy should start + * @param baseFolder Base folder, where the file hierarchy should start * @param fileDefinition Definition of the files shape * @return an initialized buffered writer * @throws ConnectorException If the base folder is a file * @throws IOException If the writer cannot be initialized correctly */ - private BufferedCsvWriter initWriter(String baseFolderName, CsvFileDefinition fileDefinition) + private BufferedCsvWriter initWriter(String baseFolder, CsvFileDefinition fileDefinition) throws ConnectorException, IOException { - File basePathDir = new File(baseFolderName); + File basePathDir = new File(baseFolder); if (basePathDir.isFile()) throw new ConnectorException( - "Base path dir '" + baseFolderName + "' already exists and is a file!"); + "Base path dir '" + baseFolder + "' already exists and is a file!"); if (!basePathDir.exists()) basePathDir.mkdirs(); - return new BufferedCsvWriter(baseFolderName, fileDefinition); + String fullPathToFile = baseFolder + File.separator + fileDefinition.getFilePath(); + + File pathFile = new File(fullPathToFile); + if (!pathFile.exists()) { + return new BufferedCsvWriter(baseFolder, fileDefinition, false, true); + } +
log.warn( + "File '{}.csv' already exists. Will append new content WITHOUT new header! Full path: {}", + fileDefinition.getFileName(), + pathFile.getAbsolutePath()); + return new BufferedCsvWriter(baseFolder, fileDefinition, false, false); + } + + /** + * Initializes a file reader for the given class that should be read in. The expected file name is + * determined based on the {@link FileNamingStrategy} of this {@link CsvFileConnector} instance + * + * @param clz the class of the entity that should be read + * @return the reader that contains information about the file to be read in + * @throws FileNotFoundException + */ + public BufferedReader initReader(Class clz) throws FileNotFoundException { + + BufferedReader newReader; + + String fileName = null; + try { + fileName = + fileNamingStrategy + .getFileName(clz) + .orElseThrow( + () -> + new ConnectorException( + "Cannot find a naming strategy for class '" + + clz.getSimpleName() + + "'.")); + } catch (ConnectorException e) { + log.error( + "Cannot get reader for entity '{}' as no file naming strategy for this file exists. Exception:{}", + clz::getSimpleName, + () -> e); + } + File filePath = new File(baseFolderName + File.separator + fileName + FILE_ENDING); + newReader = + new BufferedReader( + new InputStreamReader(new FileInputStream(filePath), StandardCharsets.UTF_8), 16384); + + return newReader; + } /** diff --git a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java index 196132b26..0a320c941 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java @@ -31,7 +31,8 @@ public class BufferedCsvWriter extends BufferedWriter { * @param quoted True, if the entries may be quoted * @throws IOException If the FileOutputStream cannot be established. */ - public BufferedCsvWriter(String baseFolder, CsvFileDefinition fileDefinition, boolean quoted) + public BufferedCsvWriter( + String baseFolder, CsvFileDefinition fileDefinition, boolean quoted, boolean writeHeader) throws IOException { super( new OutputStreamWriter( @@ -39,7 +40,7 @@ public BufferedCsvWriter(String baseFolder, CsvFileDefinition fileDefinition, bo StandardCharsets.UTF_8)); this.fileDefinition = fileDefinition; this.quoted = quoted; - writeFileHeader(); + if (writeHeader) writeFileHeader(fileDefinition.headLineElements); } /** @@ -49,8 +50,9 @@ public BufferedCsvWriter(String baseFolder, CsvFileDefinition fileDefinition, bo * @param fileDefinition The foreseen shape of the file * @throws IOException If the FileOutputStream cannot be established.
*/ - public BufferedCsvWriter(String baseFolder, CsvFileDefinition fileDefinition) throws IOException { - this(baseFolder, fileDefinition, true); + public BufferedCsvWriter(String baseFolder, CsvFileDefinition fileDefinition, boolean writeHeader) + throws IOException { + this(baseFolder, fileDefinition, false, writeHeader); } /** @@ -58,7 +60,7 @@ public BufferedCsvWriter(String baseFolder, CsvFileDefinition fileDefinition) th * * @param entityFieldData a mapping of an entity instance fields to their values */ - public void write(Map entityFieldData) throws IOException { + public void write(Map entityFieldData) throws IOException, SinkException { /* Check against eligible head line elements */ String[] eligibleHeadLineElements = fileDefinition.getHeadLineElements(); if (entityFieldData.size() != eligibleHeadLineElements.length @@ -77,11 +79,8 @@ public void write(Map entityFieldData) throws IOException { * * @throws IOException If something is messed up */ - private void writeFileHeader() throws IOException { - writeOneLine( - quoted - ? StringUtils.quote(StringUtils.camelCaseToSnakeCase(fileDefinition.headLineElements)) - : StringUtils.camelCaseToSnakeCase(fileDefinition.headLineElements)); + private void writeFileHeader(String[] headLineElements) throws IOException { + writeOneLine(StringUtils.quote(StringUtils.camelCaseToSnakeCase(headLineElements))); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/extractor/Extractor.java b/src/main/java/edu/ie3/datamodel/io/extractor/Extractor.java index 660a0f0e8..90cea7389 100644 --- a/src/main/java/edu/ie3/datamodel/io/extractor/Extractor.java +++ b/src/main/java/edu/ie3/datamodel/io/extractor/Extractor.java @@ -7,8 +7,11 @@ import edu.ie3.datamodel.exceptions.ExtractorException; import edu.ie3.datamodel.models.Operable; +import edu.ie3.datamodel.models.input.AssetTypeInput; import edu.ie3.datamodel.models.input.InputEntity; +import edu.ie3.datamodel.models.input.OperatorInput; import java.util.*; +import java.util.concurrent.CopyOnWriteArrayList; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -27,23 +30,24 @@ private Extractor() { throw new IllegalStateException("Utility classes cannot be instantiated"); } - public static List extractElements(NestedEntity nestedEntity) + public static Set extractElements(NestedEntity nestedEntity) throws ExtractorException { - List resultingList = new ArrayList<>(); + CopyOnWriteArrayList resultingList = new CopyOnWriteArrayList<>(); if (nestedEntity instanceof HasNodes) { resultingList.addAll(((HasNodes) nestedEntity).allNodes()); } + if (nestedEntity instanceof Operable) { + extractOperator((Operable) nestedEntity).ifPresent(resultingList::add); + } if (nestedEntity instanceof HasType) { - resultingList.add(((HasType) nestedEntity).getType()); + resultingList.add(extractType((HasType) nestedEntity)); } - if (nestedEntity instanceof Operable) { - resultingList.add(((Operable) nestedEntity).getOperator()); + if (nestedEntity instanceof HasThermalBus) { + resultingList.add(((HasThermalBus) nestedEntity).getThermalBus()); } - - if (nestedEntity instanceof HasBus) { - resultingList.add(((HasBus) nestedEntity).getBus()); + if (nestedEntity instanceof HasThermalStorage) { + resultingList.add(((HasThermalStorage) nestedEntity).getThermalStorage()); } - if (nestedEntity instanceof HasLine) { resultingList.add(((HasLine) nestedEntity).getLine()); } @@ -54,7 +58,7 @@ public static List extractElements(NestedEntity nestedEntity) nestedEntity.getClass().getSimpleName()); } - if 
(resultingList.isEmpty()) { + if (resultingList.isEmpty() && !(nestedEntity instanceof Operable)) { throw new ExtractorException( "Unable to extract entity of class '" + nestedEntity.getClass().getSimpleName() @@ -64,6 +68,32 @@ public static List extractElements(NestedEntity nestedEntity) + "sub-interfaces correctly?"); } - return Collections.unmodifiableList(resultingList); + resultingList.stream() + .parallel() + .forEach( + element -> { + if (element instanceof NestedEntity) { + try { + resultingList.addAll(extractElements((NestedEntity) element)); + } catch (ExtractorException e) { + log.error( + "An error occurred during extraction of nested entity '{}':{}", + () -> element.getClass().getSimpleName(), + () -> e); + } + } + }); + + return Collections.unmodifiableSet(new HashSet<>(resultingList)); + } + + public static AssetTypeInput extractType(HasType entityWithType) { + return entityWithType.getType(); + } + + public static Optional extractOperator(Operable entityWithOperator) { + return entityWithOperator.getOperator().getId().equalsIgnoreCase("NO_OPERATOR_ASSIGNED") + ? Optional.empty() + : Optional.of(entityWithOperator.getOperator()); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/extractor/HasBus.java b/src/main/java/edu/ie3/datamodel/io/extractor/HasThermalBus.java similarity index 84% rename from src/main/java/edu/ie3/datamodel/io/extractor/HasBus.java rename to src/main/java/edu/ie3/datamodel/io/extractor/HasThermalBus.java index 60c1958ac..fd9fb8f8a 100644 --- a/src/main/java/edu/ie3/datamodel/io/extractor/HasBus.java +++ b/src/main/java/edu/ie3/datamodel/io/extractor/HasThermalBus.java @@ -14,7 +14,7 @@ * @version 0.1 * @since 31.03.20 */ -public interface HasBus extends NestedEntity { +public interface HasThermalBus extends NestedEntity { - ThermalBusInput getBus(); + ThermalBusInput getThermalBus(); } diff --git a/src/main/java/edu/ie3/datamodel/io/extractor/HasThermalStorage.java b/src/main/java/edu/ie3/datamodel/io/extractor/HasThermalStorage.java new file mode 100644 index 000000000..94fe74186 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/extractor/HasThermalStorage.java @@ -0,0 +1,20 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.extractor; + +import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput; + +/** + * Interface that should be implemented by all elements holding a {@link ThermalStorageInput} + * element and should be processable by the {@link Extractor}.
+ * + * @version 0.1 + * @since 31.03.20 + */ +public interface HasThermalStorage { + + ThermalStorageInput getThermalStorage(); +} diff --git a/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java index f089f7c5a..3796266af 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java @@ -12,10 +12,7 @@ import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils; import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; -import java.util.Map; -import java.util.Optional; -import java.util.TreeMap; -import java.util.UUID; +import java.util.*; import javax.measure.Quantity; import javax.measure.Unit; import javax.measure.quantity.ElectricPotential; @@ -286,4 +283,28 @@ public > ComparableQuantity getQuantity(String field, U public Class getEntityClass() { return entityClass; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EntityData that = (EntityData) o; + return fieldsToAttributes.equals(that.fieldsToAttributes) + && entityClass.equals(that.entityClass); + } + + @Override + public int hashCode() { + return Objects.hash(fieldsToAttributes, entityClass); + } + + @Override + public String toString() { + return "EntityData{" + + "fieldsToAttributes=" + + fieldsToAttributes + + ", entityClass=" + + entityClass + + '}'; + } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/EntityFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/EntityFactory.java index ef7c9795b..c36ab3740 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/EntityFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/EntityFactory.java @@ -71,7 +71,10 @@ public Optional getEntity(D data) { private void isValidClass(Class entityClass) { if (!classes.contains(entityClass)) throw new FactoryException( - "Cannot process " + entityClass.getSimpleName() + ".class with this factory!"); + "Cannot process " + + entityClass.getSimpleName() + + ".class with this factory!\nThis factory can only process the following classes:\n - " + + classes.stream().map(Class::getSimpleName).collect(Collectors.joining("\n - "))); } /** @@ -158,7 +161,7 @@ protected int validateParameters(D data, Set... fieldSets) { String providedFieldMapString = fieldsToValues.keySet().stream() .map(key -> key + " -> " + fieldsToValues.get(key)) - .collect(Collectors.joining(",")); + .collect(Collectors.joining(",\n")); String providedKeysString = "[" + String.join(", ", fieldsToValues.keySet()) + "]"; @@ -167,14 +170,14 @@ protected int validateParameters(D data, Set... fieldSets) { throw new FactoryException( "The provided fields " + providedKeysString - + " with data {" + + " with data \n{" + providedFieldMapString + "}" + " are invalid for instance of " + data.getEntityClass().getSimpleName() - + ". \nThe following fields to be passed to a constructor of " + + ". 
\nThe following fields to be passed to a constructor of '" + data.getEntityClass().getSimpleName() - + " are possible:\n" + + "' are possible (NOT case-sensitive!):\n" + possibleOptions); } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/AssetInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/AssetInputEntityData.java index 50999707f..de52640cc 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/AssetInputEntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/AssetInputEntityData.java @@ -9,7 +9,7 @@ import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.OperatorInput; import java.util.Map; -import java.util.Optional; +import java.util.Objects; /** * Data used for the construction of {@link edu.ie3.datamodel.models.input.AssetInput} entities. @@ -45,7 +45,33 @@ public AssetInputEntityData( this.operator = operator; } - public Optional getOperatorInput() { - return Optional.ofNullable(operator); + public OperatorInput getOperatorInput() { + return operator; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + AssetInputEntityData that = (AssetInputEntityData) o; + return operator.equals(that.operator); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), operator); + } + + @Override + public String toString() { + return "AssetInputEntityData{" + + "fieldsToValues=" + + getFieldsToValues() + + ", entityClass=" + + getEntityClass() + + ", operatorInput=" + + operator + + "} "; } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactory.java index aaab491e2..10efb7cbf 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactory.java @@ -71,10 +71,10 @@ protected List> getFields(D data) { protected T buildModel(D data) { UUID uuid = data.getUUID(UUID); String id = data.getField(ID); - Optional operator = data.getOperatorInput(); + OperatorInput operator = data.getOperatorInput(); OperationTime operationTime = buildOperationTime(data); - return buildModel(data, uuid, id, operator.orElse(null), operationTime); + return buildModel(data, uuid, id, operator, operationTime); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/ConnectorInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/ConnectorInputEntityData.java index c003302ee..afdab00a5 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/ConnectorInputEntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/ConnectorInputEntityData.java @@ -9,6 +9,7 @@ import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.OperatorInput; import java.util.Map; +import java.util.Objects; /** * Data used by {@link ConnectorInputEntityFactory} to create an instance of {@link @@ -48,4 +49,34 @@ public NodeInput getNodeA() { public NodeInput getNodeB() { return nodeB; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + ConnectorInputEntityData that = (ConnectorInputEntityData) o; + return nodeA.equals(that.nodeA) && nodeB.equals(that.nodeB); + } + + @Override + public int hashCode() { 
+ return Objects.hash(super.hashCode(), nodeA, nodeB); + } + + @Override + public String toString() { + return "ConnectorInputEntityData{" + + "fieldsToValues=" + + getFieldsToValues() + + ", entityClass=" + + getEntityClass() + + ", operatorInput=" + + getOperatorInput() + + ", nodeA=" + + nodeA + + ", nodeB=" + + nodeB + + '}'; + } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactory.java index e0456c17c..0d2d8c31f 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactory.java @@ -52,6 +52,15 @@ protected CylindricalStorageInput buildModel( final ComparableQuantity c = data.getQuantity(C, StandardUnits.SPECIFIC_HEAT_CAPACITY); return new CylindricalStorageInput( - uuid, id, bus, storageVolumeLvl, storageVolumeLvlMin, inletTemp, returnTemp, c); + uuid, + id, + operator, + operationTime, + bus, + storageVolumeLvl, + storageVolumeLvlMin, + inletTemp, + returnTemp, + c); } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/LineInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/LineInputEntityData.java deleted file mode 100644 index 735ba3e3e..000000000 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/LineInputEntityData.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * © 2020. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.factory.input; - -import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.NodeInput; -import edu.ie3.datamodel.models.input.OperatorInput; -import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; -import java.util.Map; - -public class LineInputEntityData extends ConnectorInputEntityData { - private final LineTypeInput type; - - public LineInputEntityData( - Map fieldsToAttributes, - Class entityClass, - NodeInput nodeA, - NodeInput nodeB, - LineTypeInput type) { - super(fieldsToAttributes, entityClass, nodeA, nodeB); - this.type = type; - } - - public LineInputEntityData( - Map fieldsToAttributes, - Class entityClass, - OperatorInput operator, - NodeInput nodeA, - NodeInput nodeB, - LineTypeInput type) { - super(fieldsToAttributes, entityClass, operator, nodeA, nodeB); - this.type = type; - } - - public LineTypeInput getType() { - return type; - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/LineInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/LineInputFactory.java index 68ae28696..a73010a25 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/LineInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/LineInputFactory.java @@ -20,7 +20,8 @@ import org.locationtech.jts.geom.LineString; import tec.uom.se.ComparableQuantity; -public class LineInputFactory extends ConnectorInputEntityFactory { +public class LineInputFactory + extends ConnectorInputEntityFactory> { private static final String LENGTH = "length"; private static final String GEO_POSITION = "geoposition"; private static final String OLM_CHARACTERISTIC = "olmcharacteristic"; @@ -36,7 +37,7 @@ protected String[] getAdditionalFields() { @Override protected LineInput buildModel( - LineInputEntityData data, + TypedConnectorInputEntityData data, UUID uuid, String id, NodeInput nodeA, diff 
--git a/src/main/java/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputEntityData.java deleted file mode 100644 index b2a6584c4..000000000 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputEntityData.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * © 2020. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.factory.input; - -import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.NodeInput; -import edu.ie3.datamodel.models.input.OperatorInput; -import java.util.Map; - -public class MeasurementUnitInputEntityData extends AssetInputEntityData { - private final NodeInput node; - - public MeasurementUnitInputEntityData( - Map fieldsToAttributes, - Class entityClass, - NodeInput node) { - super(fieldsToAttributes, entityClass); - this.node = node; - } - - public MeasurementUnitInputEntityData( - Map fieldsToAttributes, - Class entityClass, - OperatorInput operator, - NodeInput node) { - super(fieldsToAttributes, entityClass, operator); - this.node = node; - } - - public NodeInput getNode() { - return node; - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactory.java index 3db7298a9..30b003e74 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactory.java @@ -12,7 +12,7 @@ import java.util.UUID; public class MeasurementUnitInputFactory - extends AssetInputEntityFactory { + extends AssetInputEntityFactory { private static final String V_MAG = "vmag"; private static final String V_ANG = "vang"; private static final String P = "p"; @@ -29,7 +29,7 @@ protected String[] getAdditionalFields() { @Override protected MeasurementUnitInput buildModel( - MeasurementUnitInputEntityData data, + NodeAssetInputEntityData data, UUID uuid, String id, OperatorInput operator, diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/NodeAssetInputEntityData.java similarity index 56% rename from src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantEntityData.java rename to src/main/java/edu/ie3/datamodel/io/factory/input/NodeAssetInputEntityData.java index 661d08fd8..8318a1437 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantEntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/NodeAssetInputEntityData.java @@ -3,32 +3,32 @@ * Institute of Energy Systems, Energy Efficiency and Energy Economics, * Research group Distribution grid planning and operation */ -package edu.ie3.datamodel.io.factory.input.participant; +package edu.ie3.datamodel.io.factory.input; -import edu.ie3.datamodel.io.factory.input.AssetInputEntityData; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.OperatorInput; import java.util.Map; +import java.util.Objects; /** - * Data used by {@link SystemParticipantInputEntityFactory} to create an instance of {@link - * edu.ie3.datamodel.models.input.system.SystemParticipantInput}, thus needing additional - * information about 
the {@link edu.ie3.datamodel.models.input.NodeInput}, which cannot be provided - through the attribute map. + * Data used by all factories used to create instances of {@link + * edu.ie3.datamodel.models.input.InputEntity}s holding one {@link NodeInput} entity, thus needing + * additional information about the {@link edu.ie3.datamodel.models.input.NodeInput}, which cannot + * be provided through the attribute map. */ -public class SystemParticipantEntityData extends AssetInputEntityData { +public class NodeAssetInputEntityData extends AssetInputEntityData { private final NodeInput node; /** - * Creates a new SystemParticipantEntityData object for an operated, always on system participant + * Creates a new NodeAssetInputEntityData object for an operated, always on system participant * input * * @param fieldsToAttributes attribute map: field name -> value * @param entityClass class of the entity to be created with this data * @param node input node */ - public SystemParticipantEntityData( + public NodeAssetInputEntityData( Map fieldsToAttributes, Class entityClass, NodeInput node) { @@ -37,14 +37,14 @@ public SystemParticipantEntityData( } /** - * Creates a new SystemParticipantEntityData object for an operable system participant input + * Creates a new NodeAssetInputEntityData object for an operable system participant input * * @param fieldsToAttributes attribute map: field name -> value * @param entityClass class of the entity to be created with this data * @param node input node * @param operator operator input */ - public SystemParticipantEntityData( + public NodeAssetInputEntityData( Map fieldsToAttributes, Class entityClass, OperatorInput operator, @@ -56,4 +56,18 @@ public SystemParticipantEntityData( public NodeInput getNode() { return node; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + NodeAssetInputEntityData that = (NodeAssetInputEntityData) o; + return getNode().equals(that.getNode()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), getNode()); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactory.java index ef9d5eb85..b29ab1d0d 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactory.java @@ -41,6 +41,6 @@ protected ThermalHouseInput buildModel( data.getQuantity(ETH_LOSSES, StandardUnits.THERMAL_TRANSMISSION); final ComparableQuantity ethCapa = data.getQuantity(ETH_CAPA, StandardUnits.HEAT_CAPACITY); - return new ThermalHouseInput(uuid, id, busInput, ethLosses, ethCapa); + return new ThermalHouseInput(uuid, id, operator, operationTime, busInput, ethLosses, ethCapa); } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer2WInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer2WInputEntityData.java deleted file mode 100644 index 8f6e1b271..000000000 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer2WInputEntityData.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * © 2020.
TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.factory.input; - -import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.NodeInput; -import edu.ie3.datamodel.models.input.OperatorInput; -import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; -import java.util.Map; - -public class Transformer2WInputEntityData extends ConnectorInputEntityData { - private final Transformer2WTypeInput type; - - public Transformer2WInputEntityData( - Map fieldsToAttributes, - Class entityClass, - NodeInput nodeA, - NodeInput nodeB, - Transformer2WTypeInput type) { - super(fieldsToAttributes, entityClass, nodeA, nodeB); - this.type = type; - } - - public Transformer2WInputEntityData( - Map fieldsToAttributes, - Class entityClass, - OperatorInput operator, - NodeInput nodeA, - NodeInput nodeB, - Transformer2WTypeInput type) { - super(fieldsToAttributes, entityClass, operator, nodeA, nodeB); - this.type = type; - } - - public Transformer2WTypeInput getType() { - return type; - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactory.java index b2d19d1f0..13ffc0905 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactory.java @@ -13,7 +13,8 @@ import java.util.UUID; public class Transformer2WInputFactory - extends ConnectorInputEntityFactory { + extends ConnectorInputEntityFactory< + Transformer2WInput, TypedConnectorInputEntityData> { private static final String TAP_POS = "tappos"; private static final String AUTO_TAP = "autotap"; @@ -29,7 +30,7 @@ protected String[] getAdditionalFields() { @Override protected Transformer2WInput buildModel( - Transformer2WInputEntityData data, + TypedConnectorInputEntityData data, UUID uuid, String id, NodeInput nodeA, diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer3WInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer3WInputEntityData.java index 472244bb5..b06022b35 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer3WInputEntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/Transformer3WInputEntityData.java @@ -10,10 +10,11 @@ import edu.ie3.datamodel.models.input.OperatorInput; import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; import java.util.Map; +import java.util.Objects; -public class Transformer3WInputEntityData extends ConnectorInputEntityData { +public class Transformer3WInputEntityData + extends TypedConnectorInputEntityData { private final NodeInput nodeC; - private final Transformer3WTypeInput type; public Transformer3WInputEntityData( Map fieldsToAttributes, @@ -22,9 +23,8 @@ public Transformer3WInputEntityData( NodeInput nodeB, NodeInput nodeC, Transformer3WTypeInput type) { - super(fieldsToAttributes, entityClass, nodeA, nodeB); + super(fieldsToAttributes, entityClass, nodeA, nodeB, type); this.nodeC = nodeC; - this.type = type; } public Transformer3WInputEntityData( @@ -35,16 +35,45 @@ public Transformer3WInputEntityData( NodeInput nodeB, NodeInput nodeC, Transformer3WTypeInput type) { - super(fieldsToAttributes, entityClass, operator, nodeA, nodeB); + super(fieldsToAttributes, entityClass, operator, nodeA, nodeB, type); 
this.nodeC = nodeC; - this.type = type; } public NodeInput getNodeC() { return nodeC; } - public Transformer3WTypeInput getType() { - return type; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + Transformer3WInputEntityData that = (Transformer3WInputEntityData) o; + return Objects.equals(nodeC, that.nodeC); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), nodeC); + } + + @Override + public String toString() { + return "Transformer3WInputEntityData{" + + "fieldsToValues=" + + getFieldsToValues() + + ", entityClass=" + + getEntityClass() + + ", operatorInput=" + + getOperatorInput() + + ", nodeA=" + + getNodeA() + + ", nodeB=" + + getNodeB() + + ", nodeC=" + + nodeC + + ", type=" + + getType() + + '}'; } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/TypedConnectorInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/TypedConnectorInputEntityData.java new file mode 100644 index 000000000..596b9453f --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/TypedConnectorInputEntityData.java @@ -0,0 +1,105 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.factory.input; + +import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.AssetTypeInput; +import edu.ie3.datamodel.models.input.NodeInput; +import edu.ie3.datamodel.models.input.OperatorInput; +import java.util.Map; +import java.util.Objects; + +/** + * Data used for those classes of {@link edu.ie3.datamodel.models.input.connector.ConnectorInput} + * that need an instance of some type T of {@link + * edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput} as well. + * + * @param Subclass of {@link AssetTypeInput} that is required for the construction of the + * ConnectorInput + */ +public class TypedConnectorInputEntityData + extends ConnectorInputEntityData { + + private final T type; + + /** + * Creates a new TypedConnectorInputEntityData object for a connector input that needs a type + * input as well. It sets the operator to default. 
+ * + * @param fieldsToAttributes attribute map: field name -> value + * @param entityClass class of the entity to be created with this data + * @param nodeA input nodeA + * @param nodeB input nodeB + * @param type type input + */ + public TypedConnectorInputEntityData( + Map fieldsToAttributes, + Class entityClass, + NodeInput nodeA, + NodeInput nodeB, + T type) { + super(fieldsToAttributes, entityClass, nodeA, nodeB); + this.type = type; + } + + /** + * Creates a new TypedConnectorInputEntityData object for an operable connector input that needs + * a type input as well + * + * @param fieldsToAttributes attribute map: field name -> value + * @param entityClass class of the entity to be created with this data + * @param operator specific operator to use + * @param nodeA input nodeA + * @param nodeB input nodeB + * @param type type input + */ + public TypedConnectorInputEntityData( + Map fieldsToAttributes, + Class entityClass, + OperatorInput operator, + NodeInput nodeA, + NodeInput nodeB, + T type) { + super(fieldsToAttributes, entityClass, operator, nodeA, nodeB); + this.type = type; + } + + public T getType() { + return type; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + TypedConnectorInputEntityData that = (TypedConnectorInputEntityData) o; + return type.equals(that.type); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), type); + } + + @Override + public String toString() { + return "TypedConnectorInputEntityData{" + + "fieldsToValues=" + + getFieldsToValues() + + ", entityClass=" + + getEntityClass() + + ", operatorInput=" + + getOperatorInput() + + ", nodeA=" + + getNodeA() + + ", nodeB=" + + getNodeB() + + ", type=" + + type + + '}'; + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/GraphicInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/GraphicInputFactory.java index b8be4bbc1..14687499a 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/GraphicInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/GraphicInputFactory.java @@ -60,7 +60,7 @@ protected T buildModel(D data) { UUID uuid = data.getUUID(UUID); final String graphicLayer = data.getField(GRAPHIC_LAYER); - final LineString pathLineString = + final LineString path = data.getLineString(PATH_LINE_STRING) .orElse( new GeometryFactory() .createLineString( NodeInput.DEFAULT_GEO_POSITION.getCoordinates(), NodeInput.DEFAULT_GEO_POSITION.getCoordinates()))); @@ -69,7 +69,7 @@ protected T buildModel(D data) { - return buildModel(data, uuid, graphicLayer, pathLineString); + return buildModel(data, uuid, graphicLayer, path); } /** @@ -79,6 +79,5 @@ protected T buildModel(D data) { * @param uuid UUID of the input entity * @return newly created asset object */ - protected abstract T buildModel( - D data, UUID uuid, String graphicLayer, LineString pathLineString); + protected abstract T buildModel(D data, UUID uuid, String graphicLayer, LineString path); } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/LineGraphicInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/LineGraphicInputEntityData.java index 5cc4e8005..a40659669 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/LineGraphicInputEntityData.java +++ 
b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/LineGraphicInputEntityData.java @@ -9,6 +9,7 @@ import edu.ie3.datamodel.models.input.connector.LineInput; import edu.ie3.datamodel.models.input.graphics.LineGraphicInput; import java.util.Map; +import java.util.Objects; /** * Data used by {@link LineGraphicInputFactory} used to create instances of {@link @@ -32,4 +33,30 @@ public LineGraphicInputEntityData(Map fieldsToAttributes, LineIn public LineInput getLine() { return line; } + + @Override + public String toString() { + return "LineGraphicInputEntityData{" + + "line=" + + line + + ", fieldsToValues=" + + getFieldsToValues() + + ", entityClass=" + + getEntityClass() + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + LineGraphicInputEntityData that = (LineGraphicInputEntityData) o; + return getLine().equals(that.getLine()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), getLine()); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/LineGraphicInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/LineGraphicInputFactory.java index d7fd5352d..70ae74f91 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/LineGraphicInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/LineGraphicInputFactory.java @@ -29,7 +29,7 @@ protected String[] getAdditionalFields() { @Override protected LineGraphicInput buildModel( - LineGraphicInputEntityData data, UUID uuid, String graphicLayer, LineString pathLineString) { - return new LineGraphicInput(uuid, graphicLayer, pathLineString, data.getLine()); + LineGraphicInputEntityData data, UUID uuid, String graphicLayer, LineString path) { + return new LineGraphicInput(uuid, graphicLayer, path, data.getLine()); } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/NodeGraphicInputEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/NodeGraphicInputEntityData.java index 21f464184..11be555b9 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/NodeGraphicInputEntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/NodeGraphicInputEntityData.java @@ -9,6 +9,7 @@ import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput; import java.util.Map; +import java.util.Objects; /** * Data used by {@link NodeGraphicInputFactory} used to create instances of {@link @@ -33,4 +34,30 @@ public NodeGraphicInputEntityData(Map fieldsToAttributes, NodeIn public NodeInput getNode() { return node; } + + @Override + public String toString() { + return "NodeGraphicInputEntityData{" + + "node=" + + node + + ", fieldsToValues=" + + getFieldsToValues() + + ", entityClass=" + + getEntityClass() + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + NodeGraphicInputEntityData that = (NodeGraphicInputEntityData) o; + return getNode().equals(that.getNode()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), getNode()); + } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/NodeGraphicInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/NodeGraphicInputFactory.java 
index 87f07f822..e02b25f0e 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/NodeGraphicInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/graphics/NodeGraphicInputFactory.java @@ -33,8 +33,8 @@ protected String[] getAdditionalFields() { @Override protected NodeGraphicInput buildModel( - NodeGraphicInputEntityData data, UUID uuid, String graphicLayer, LineString pathLineString) { + NodeGraphicInputEntityData data, UUID uuid, String graphicLayer, LineString path) { final Point point = data.getPoint(POINT).orElse(NodeInput.DEFAULT_GEO_POSITION); - return new NodeGraphicInput(uuid, graphicLayer, pathLineString, data.getNode(), point); + return new NodeGraphicInput(uuid, graphicLayer, path, data.getNode(), point); } } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactory.java index 64c28a6ea..d7ff0f5a6 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactory.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input.participant; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; import edu.ie3.datamodel.models.OperationTime; import edu.ie3.datamodel.models.StandardUnits; import edu.ie3.datamodel.models.input.NodeInput; @@ -15,7 +16,7 @@ import tec.uom.se.ComparableQuantity; public class FixedFeedInInputFactory - extends SystemParticipantInputEntityFactory { + extends SystemParticipantInputEntityFactory { private static final String S_RATED = "srated"; private static final String COSPHI_RATED = "cosphirated"; @@ -31,7 +32,7 @@ protected String[] getAdditionalFields() { @Override protected FixedFeedInInput buildModel( - SystemParticipantEntityData data, + NodeAssetInputEntityData data, java.util.UUID uuid, String id, NodeInput node, diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactory.java index 844587388..785fc4259 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactory.java @@ -6,6 +6,7 @@ package edu.ie3.datamodel.io.factory.input.participant; import edu.ie3.datamodel.exceptions.ParsingException; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; import edu.ie3.datamodel.models.OperationTime; import edu.ie3.datamodel.models.StandardLoadProfile; import edu.ie3.datamodel.models.StandardUnits; @@ -20,14 +21,14 @@ import tec.uom.se.ComparableQuantity; public class LoadInputFactory - extends SystemParticipantInputEntityFactory { + extends SystemParticipantInputEntityFactory { private static final Logger logger = LoggerFactory.getLogger(LoadInputFactory.class); - private static final String SLP = "slp"; + private static final String SLP = "standardloadprofile"; private static final String DSM = "dsm"; private static final String E_CONS_ANNUAL = "econsannual"; private static final String S_RATED = "srated"; - private static final String COS_PHI = "cosphi"; + private static final String COS_PHI = "cosphirated"; public LoadInputFactory() { super(LoadInput.class); @@ -40,7 +41,7 @@ protected String[] getAdditionalFields() { @Override protected LoadInput buildModel( - SystemParticipantEntityData data, + 
NodeAssetInputEntityData data, java.util.UUID uuid, String id, NodeInput node, diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/PvInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/PvInputFactory.java index 1b18b9cf0..b85ae0c33 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/PvInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/PvInputFactory.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input.participant; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; import edu.ie3.datamodel.models.OperationTime; import edu.ie3.datamodel.models.StandardUnits; import edu.ie3.datamodel.models.input.NodeInput; @@ -17,7 +18,7 @@ import tec.uom.se.ComparableQuantity; public class PvInputFactory - extends SystemParticipantInputEntityFactory { + extends SystemParticipantInputEntityFactory { private static final String ALBEDO = "albedo"; private static final String AZIMUTH = "azimuth"; private static final String ETA_CONV = "etaconv"; @@ -26,7 +27,7 @@ public class PvInputFactory private static final String KT = "kt"; private static final String MARKET_REACTION = "marketreaction"; private static final String S_RATED = "srated"; - private static final String COS_PHI = "cosphi"; + private static final String COS_PHI_RATED = "cosphirated"; public PvInputFactory() { super(PvInput.class); @@ -35,13 +36,13 @@ public PvInputFactory() { @Override protected String[] getAdditionalFields() { return new String[] { - ALBEDO, AZIMUTH, ETA_CONV, HEIGHT, KG, KT, MARKET_REACTION, S_RATED, COS_PHI + ALBEDO, AZIMUTH, ETA_CONV, HEIGHT, KG, KT, MARKET_REACTION, S_RATED, COS_PHI_RATED }; } @Override protected PvInput buildModel( - SystemParticipantEntityData data, + NodeAssetInputEntityData data, java.util.UUID uuid, String id, NodeInput node, @@ -57,7 +58,7 @@ protected PvInput buildModel( final double kT = data.getDouble(KT); final boolean marketReaction = data.getBoolean(MARKET_REACTION); final ComparableQuantity sRated = data.getQuantity(S_RATED, StandardUnits.S_RATED); - final double cosPhi = data.getDouble(COS_PHI); + final double cosPhi = data.getDouble(COS_PHI_RATED); return new PvInput( uuid, diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantInputEntityFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantInputEntityFactory.java index 8ccc93223..dfc12651e 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantInputEntityFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantInputEntityFactory.java @@ -8,6 +8,7 @@ import edu.ie3.datamodel.exceptions.FactoryException; import edu.ie3.datamodel.exceptions.ParsingException; import edu.ie3.datamodel.io.factory.input.AssetInputEntityFactory; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; import edu.ie3.datamodel.models.OperationTime; import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.OperatorInput; @@ -17,7 +18,7 @@ /** * Abstract factory class for creating {@link SystemParticipantInput} entities with {@link - * SystemParticipantEntityData} data objects. + * NodeAssetInputEntityData} data objects. * * @param Type of entity that this factory can create. 
Must be a subclass of {@link * SystemParticipantInput} @@ -26,7 +27,7 @@ * @since 28.01.20 */ abstract class SystemParticipantInputEntityFactory< - T extends SystemParticipantInput, D extends SystemParticipantEntityData> + T extends SystemParticipantInput, D extends NodeAssetInputEntityData> extends AssetInputEntityFactory { private static final String Q_CHARACTERISTICS = "qcharacteristics"; diff --git a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantTypedEntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantTypedEntityData.java index 986bf8b78..d8e52c8c7 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantTypedEntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/input/participant/SystemParticipantTypedEntityData.java @@ -5,11 +5,13 @@ */ package edu.ie3.datamodel.io.factory.input.participant; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.NodeInput; import edu.ie3.datamodel.models.input.OperatorInput; import edu.ie3.datamodel.models.input.system.type.SystemParticipantTypeInput; import java.util.Map; +import java.util.Objects; /** * Data used for those classes of {@link @@ -19,8 +21,8 @@ * @param Subclass of {@link SystemParticipantTypeInput} that is required for the construction * of the SystemParticipantInput */ -class SystemParticipantTypedEntityData - extends SystemParticipantEntityData { +public class SystemParticipantTypedEntityData + extends NodeAssetInputEntityData { private final T typeInput; @@ -62,6 +64,36 @@ public SystemParticipantTypedEntityData( this.typeInput = typeInput; } + @Override + public String toString() { + return "SystemParticipantTypedEntityData{" + + "typeInput=" + + typeInput + + ", node=" + + getNode() + + ", operatorInput=" + + getOperatorInput() + + ", fieldsToValues=" + + getFieldsToValues() + + ", entityClass=" + + getEntityClass() + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + SystemParticipantTypedEntityData that = (SystemParticipantTypedEntityData) o; + return getTypeInput().equals(that.getTypeInput()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), getTypeInput()); + } + public T getTypeInput() { return typeInput; } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactory.java index 758eda515..332a0b4c9 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactory.java @@ -28,7 +28,7 @@ public class SystemParticipantTypeInputFactory private static final String CAP_EX = "capex"; private static final String OP_EX = "opex"; private static final String S_RATED = "srated"; - private static final String COS_PHI = "cosphi"; + private static final String COS_PHI_RATED = "cosphirated"; // required in multiple types private static final String ETA_CONV = "etaconv"; @@ -73,7 +73,7 @@ public SystemParticipantTypeInputFactory() { @Override protected List> getFields(SimpleEntityData data) { Set standardConstructorParams = - newSet(ENTITY_UUID, ENTITY_ID, CAP_EX, OP_EX, S_RATED, COS_PHI); + 
newSet(ENTITY_UUID, ENTITY_ID, CAP_EX, OP_EX, S_RATED, COS_PHI_RATED); Set constructorParameters = null; if (data.getEntityClass().equals(EvTypeInput.class)) { @@ -111,7 +111,7 @@ protected SystemParticipantTypeInput buildModel(SimpleEntityData data) { ComparableQuantity capEx = data.getQuantity(CAP_EX, StandardUnits.CAPEX); ComparableQuantity opEx = data.getQuantity(OP_EX, StandardUnits.ENERGY_PRICE); ComparableQuantity sRated = data.getQuantity(S_RATED, StandardUnits.S_RATED); - double cosPhi = data.getDouble(COS_PHI); + double cosPhi = data.getDouble(COS_PHI_RATED); if (data.getEntityClass().equals(EvTypeInput.class)) return buildEvTypeInput(data, uuid, id, capEx, opEx, sRated, cosPhi); diff --git a/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java index 8a27e5e4b..b570179c2 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java @@ -26,6 +26,8 @@ public abstract class EntityProcessor extends Processor< protected final String[] headerElements; private final SortedMap fieldNameToMethod; + private static final String NODE_INTERNAL = "nodeInternal"; + /** * Create a new EntityProcessor * @@ -33,7 +35,8 @@ public abstract class EntityProcessor extends Processor< */ public EntityProcessor(Class registeredClass) { super(registeredClass); - this.fieldNameToMethod = mapFieldNameToGetter(registeredClass); + this.fieldNameToMethod = + mapFieldNameToGetter(registeredClass, Collections.singleton(NODE_INTERNAL)); this.headerElements = fieldNameToMethod.keySet().toArray(new String[0]); } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java index a3480fb19..d7b523d62 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java @@ -12,10 +12,11 @@ import edu.ie3.datamodel.models.StandardLoadProfile; import edu.ie3.datamodel.models.StandardUnits; import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.models.input.connector.SwitchInput; import edu.ie3.datamodel.models.input.system.StorageStrategy; import edu.ie3.datamodel.models.input.system.characteristic.CharacteristicInput; import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; -import edu.ie3.util.TimeUtil; import java.beans.Introspector; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -57,18 +58,7 @@ public abstract class Processor { private static final String VOLT_LVL = NodeInputFactory.VOLT_LVL; private static final String V_RATED = NodeInputFactory.V_RATED; - /** - * Comparator to sort a Map of field name to getter method, so that the first entry is the uuid - * and the rest is sorted alphabetically. 
- */ - private static class UuidFirstComparator implements Comparator { - @Override - public int compare(String a, String b) { - if (a.equalsIgnoreCase(UniqueEntity.UUID_FIELD_NAME)) return -1; - else if (b.equalsIgnoreCase(UniqueEntity.UUID_FIELD_NAME)) return 1; - else return a.compareTo(b); - } - } + private static final String PARALLEL_DEVICES = "parallelDevices"; /** * Instantiates a Processor for a foreseen class @@ -90,6 +80,19 @@ protected Processor(Class foreSeenClass) { this.registeredClass = foreSeenClass; } + /** + * Comparator to sort a Map of field name to getter method, so that the first entry is the uuid + * and the rest is sorted alphabetically. + */ + private static class UuidFirstComparator implements Comparator { + @Override + public int compare(String a, String b) { + if (a.equalsIgnoreCase(UniqueEntity.UUID_FIELD_NAME)) return -1; + else if (b.equalsIgnoreCase(UniqueEntity.UUID_FIELD_NAME)) return 1; + else return a.compareTo(b); + } + } + /** * Maps the foreseen table fields to the objects getters * @@ -115,6 +118,12 @@ protected SortedMap mapFieldNameToGetter( // filter out properties with setters only .filter(pd -> Objects.nonNull(pd.getReadMethod())) .filter(pd -> !ignoreFields.contains(pd.getName())) + .filter( + pd -> + // switches can never be parallel but have this field due to inheritance -> filter + // it out as it cannot be passed into the constructor + !(registeredClass.equals(SwitchInput.class) + && pd.getName().equalsIgnoreCase(PARALLEL_DEVICES))) .forEach( pd -> { String fieldName = pd.getName(); @@ -246,7 +255,6 @@ protected String processMethodResult(Object methodReturnObject, Method method, S case "LineTypeInput": case "LineInput": case "NodeInput": - case "OperatorInput": case "StorageTypeInput": case "SystemParticipantInput": case "ThermalBusInput": @@ -257,6 +265,12 @@ protected String processMethodResult(Object methodReturnObject, Method method, S case "WecTypeInput": resultStringBuilder.append(((UniqueEntity) methodReturnObject).getUuid()); break; + case "OperatorInput": + resultStringBuilder.append( + ((OperatorInput) methodReturnObject).getId().equalsIgnoreCase("NO_OPERATOR_ASSIGNED") + ? 
"" + : ((OperatorInput) methodReturnObject).getUuid()); + break; case "EvCharacteristicInput": case "OlmCharacteristicInput": case "WecCharacteristicInput": @@ -359,7 +373,7 @@ protected String processOperationTime(OperationTime operationTime, String fieldN * @return string representation of the ZonedDateTime */ protected String processZonedDateTime(ZonedDateTime zonedDateTime) { - return TimeUtil.withDefaults.toString(zonedDateTime); + return zonedDateTime.toString(); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/processor/ProcessorProvider.java b/src/main/java/edu/ie3/datamodel/io/processor/ProcessorProvider.java index 9a56bc33f..5b81a9cba 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/ProcessorProvider.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/ProcessorProvider.java @@ -237,7 +237,6 @@ private Collection> allEntityProcessors( Collection> resultingProcessors = new ArrayList<>(); - // todo add missing processors here // Input Entity Processor for (Class cls : InputEntityProcessor.eligibleEntityClasses) { resultingProcessors.add(new InputEntityProcessor(cls)); diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index fad1c6032..8a0da776c 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -11,14 +11,19 @@ import edu.ie3.datamodel.exceptions.SinkException; import edu.ie3.datamodel.io.FileNamingStrategy; import edu.ie3.datamodel.io.connectors.CsvFileConnector; -import edu.ie3.datamodel.io.connectors.DataConnector; import edu.ie3.datamodel.io.csv.BufferedCsvWriter; import edu.ie3.datamodel.io.extractor.Extractor; import edu.ie3.datamodel.io.extractor.NestedEntity; import edu.ie3.datamodel.io.processor.ProcessorProvider; import edu.ie3.datamodel.io.processor.timeseries.TimeSeriesProcessorKey; import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.InputEntity; +import edu.ie3.datamodel.models.input.*; +import edu.ie3.datamodel.models.input.connector.LineInput; +import edu.ie3.datamodel.models.input.connector.SwitchInput; +import edu.ie3.datamodel.models.input.connector.Transformer2WInput; +import edu.ie3.datamodel.models.input.connector.Transformer3WInput; +import edu.ie3.datamodel.models.input.container.*; +import edu.ie3.datamodel.models.input.system.*; import edu.ie3.datamodel.models.result.ResultEntity; import edu.ie3.datamodel.models.timeseries.TimeSeries; import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; @@ -26,11 +31,16 @@ import java.io.IOException; import java.util.*; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; /** - * Sink that provides all capabilities to write {@link UniqueEntity}s to .csv-files + * Sink that provides all capabilities to write {@link UniqueEntity}s to .csv-files. Be careful + * about using methods other than {@link #persistJointGrid(JointGridContainer)} because all other + * methods do not check for duplicate entries but only dump the data they received. In + * contrast, when using {@link #persistJointGrid(JointGridContainer)}, all nested entities get + * extracted first and then dumped individually without any duplicate lines. 
* * @version 0.1 * @since 19.03.20 @@ -98,11 +108,6 @@ public CsvFileSink( if (initFiles) initFiles(processorProvider, connector); } - @Override - public DataConnector getDataConnector() { - return connector; - } - @Override public void persistAll(Collection entities) { for (T entity : entities) { @@ -135,28 +140,29 @@ public void persist(T entity) { TimeSeries timeSeries = (TimeSeries) entity; persistTimeSeries(timeSeries); } else { - throw new SinkException( - "I don't know how to handle an entity of class " + entity.getClass().getSimpleName()); + log.error( + "I don't know how to handle an entity of class {}", entity.getClass().getSimpleName()); } } @Override public void persistIgnoreNested(C entity) { - LinkedHashMap entityFieldData = - processorProvider - .handleEntity(entity) - .orElseThrow( - () -> - new SinkException( - "Cannot persist entity of type '" - + entity.getClass().getSimpleName() - + "'. This sink can only process the following entities: [" - + processorProvider.getRegisteredClasses().stream() - .map(Class::getSimpleName) - .collect(Collectors.joining(",")) - + "]")); - + LinkedHashMap entityFieldData = new LinkedHashMap<>(); try { + entityFieldData = + processorProvider + .handleEntity(entity) + .orElseThrow( + () -> + new SinkException( + "Cannot persist entity of type '" + + entity.getClass().getSimpleName() + + "'. This sink can only process the following entities: [" + + processorProvider.getRegisteredClasses().stream() + .map(Class::getSimpleName) + .collect(Collectors.joining(",")) + + "]")); + String[] headerElements = processorProvider.getHeaderElements(entity.getClass()); BufferedCsvWriter writer = connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); @@ -168,6 +174,11 @@ public void persistIgnoreNested(C entity) { log.error("Exception occurred during retrieval of writer. Cannot write this element.", e); } catch (IOException e) { log.error("Exception occurred during writing of this element. Cannot write this element.", e); + } catch (SinkException e) { + log.error( + "Cannot persist provided entity '{}'. 
Exception: {}", + () -> entity.getClass().getSimpleName(), + () -> e); } } @@ -176,27 +187,117 @@ public void persistAllIgnoreNested(Collection entiti entities.parallelStream().forEach(this::persistIgnoreNested); } + @Override + public void persistJointGrid(JointGridContainer jointGridContainer) { + // get raw grid entities with types or operators + RawGridElements rawGridElements = jointGridContainer.getRawGrid(); + Set nodes = rawGridElements.getNodes(); + Set lines = rawGridElements.getLines(); + Set transformer2Ws = rawGridElements.getTransformer2Ws(); + Set transformer3Ws = rawGridElements.getTransformer3Ws(); + Set switches = rawGridElements.getSwitches(); + Set measurementUnits = rawGridElements.getMeasurementUnits(); + + // get system participants with types or operators + SystemParticipants systemParticipants = jointGridContainer.getSystemParticipants(); + Set bmPlants = systemParticipants.getBmPlants(); + Set chpPlants = systemParticipants.getChpPlants(); + Set evCS = systemParticipants.getEvCS(); + Set evs = systemParticipants.getEvs(); + Set fixedFeedIns = systemParticipants.getFixedFeedIns(); + Set heatPumps = systemParticipants.getHeatPumps(); + Set loads = systemParticipants.getLoads(); + Set pvPlants = systemParticipants.getPvPlants(); + Set storages = systemParticipants.getStorages(); + Set wecPlants = systemParticipants.getWecPlants(); + + // get graphic elements (just for better readability, we could also just get them directly + // below) + GraphicElements graphicElements = jointGridContainer.getGraphics(); + + // extract types + Set types = + Stream.of( + lines, + transformer2Ws, + transformer3Ws, + bmPlants, + chpPlants, + evs, + heatPumps, + storages, + wecPlants) + .flatMap(Collection::stream) + .map( + entityWithType -> + Extractor.extractType( + entityWithType)) // due to a bug in java 8 this *cannot* be replaced with + // method reference! + .collect(Collectors.toSet()); + + // extract operators + Set operators = + Stream.of( + nodes, + lines, + transformer2Ws, + transformer3Ws, + switches, + measurementUnits, + bmPlants, + chpPlants, + evCS, + evs, + fixedFeedIns, + heatPumps, + loads, + pvPlants, + storages, + wecPlants) + .flatMap(Collection::stream) + .map(Extractor::extractOperator) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toSet()); + + // persist all entities + Stream.of( + rawGridElements.allEntitiesAsList(), + systemParticipants.allEntitiesAsList(), + graphicElements.allEntitiesAsList(), + types, + operators) + .flatMap(Collection::stream) + .parallel() + .forEach(this::persistIgnoreNested); + } + + @Override + public void shutdown() { + // shutdown the connector + connector.shutdown(); + } + @Override public , V extends Value> void persistTimeSeries( TimeSeries timeSeries) { TimeSeriesProcessorKey key = new TimeSeriesProcessorKey(timeSeries); - log.debug("I got a time series of type {}.", key); - - Set> entityFieldData = - processorProvider - .handleTimeSeries(timeSeries) - .orElseThrow( - () -> - new SinkException( - "Cannot persist time series of combination '" - + key - + "'. This sink can only process the following combinations: [" - + processorProvider.getRegisteredTimeSeriesCombinations().stream() - .map(TimeSeriesProcessorKey::toString) - .collect(Collectors.joining(",")) - + "]")); try { + Set> entityFieldData = + processorProvider + .handleTimeSeries(timeSeries) + .orElseThrow( + () -> + new SinkException( + "Cannot persist time series of combination '" + + key + + "'. 
This sink can only process the following combinations: [" + + processorProvider.getRegisteredTimeSeriesCombinations().stream() + .map(TimeSeriesProcessorKey::toString) + .collect(Collectors.joining(",")) + + "]")); + String[] headerElements = processorProvider.getHeaderElements(key); BufferedCsvWriter writer = connector.getOrInitWriter(timeSeries, headerElements, csvSep); entityFieldData.forEach( @@ -204,14 +305,21 @@ public , V extends Value> void persistTimeSeries( try { writer.write(data); } catch (IOException e) { - log.error("Cannot write the following entity data: '{}'", data); + log.error( + "Cannot write the following entity data: '{}'. Exception: {}", + () -> data, + () -> e); + } catch (SinkException e) { + log.error("Exception occurred during processing the provided data fields: ", e); } }); } catch (ProcessorProviderException e) { log.error( - "Exception occurred during receiving of header elements. Cannot write this element", e); + "Exception occurred during receiving of header elements. Cannot write this element.", e); } catch (ConnectorException e) { - log.error("Exception occurred during acquisition of writer"); + log.error("Exception occurred during acquisition of writer.", e); + } catch (SinkException e) { + log.error("Exception occurred during processor request: ", e); } } diff --git a/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java b/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java index 6caa0d5c8..1d5518d4f 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java @@ -8,6 +8,7 @@ import edu.ie3.datamodel.io.connectors.DataConnector; import edu.ie3.datamodel.io.processor.EntityProcessor; import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.container.JointGridContainer; import edu.ie3.datamodel.models.timeseries.TimeSeries; import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; import edu.ie3.datamodel.models.value.Value; @@ -20,8 +21,11 @@ */ public interface DataSink { - /** @return the connector of this sink */ - DataConnector getDataConnector(); + /** + * Shutdown this sink and do all cleanup operations (e.g. closing of the {@link DataConnector} + * here + */ + void shutdown(); /** * Should implement the entry point of a data sink to persist an entity. By default this method @@ -40,8 +44,8 @@ public interface DataSink { * Should implement the entry point of a data sink to persist multiple entities in a collection. * By default this method should take care about the extraction process of nested entities (if * any) and use {@link edu.ie3.datamodel.io.extractor.Extractor} accordingly. For a faster method - * that neglects the nested objects persistence use {@link - * DataSink#persistIgnoreNested(UniqueEntity)} + * that neglects the nested objects persistence and only persists the uuid of the nested * objects + * (if any), instead of the object itself use {@link DataSink#persistIgnoreNested(UniqueEntity)} * * @param entities a collection of entities that should be persisted * @param bounded to be all unique entities. 
Handling of specific entities is normally then @@ -92,4 +96,11 @@ public interface DataSink { */ , V extends Value> void persistTimeSeries( TimeSeries timeSeries); + + /** + * Should implement the entry point of a data sink to persist a whole {@link JointGridContainer} + * + * @param jointGridContainer the {@link JointGridContainer} that should be persisted + */ + void persistJointGrid(JointGridContainer jointGridContainer); } diff --git a/src/main/java/edu/ie3/datamodel/io/source/DataSource.java b/src/main/java/edu/ie3/datamodel/io/source/DataSource.java index 89d0b8887..fe9f3cf3c 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/DataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/DataSource.java @@ -5,11 +5,10 @@ */ package edu.ie3.datamodel.io.source; -import edu.ie3.datamodel.io.connectors.DataConnector; - -/** Describes a class that fetches data from a persistence location */ -public interface DataSource { - - /** @return the connector of this source */ - DataConnector getDataConnector(); -} +/** + * General interface that is implemented by all specific data sources for different types of data + * structures that are persisted in different locations. Note: This interface is still under + * development and should be considered more as an internal API. It might change or even will be + * removed in the future! + */ +public interface DataSource {} diff --git a/src/main/java/edu/ie3/datamodel/io/source/GraphicSource.java b/src/main/java/edu/ie3/datamodel/io/source/GraphicSource.java new file mode 100644 index 000000000..cbe2dabc0 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/GraphicSource.java @@ -0,0 +1,104 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source; + +import edu.ie3.datamodel.models.input.NodeInput; +import edu.ie3.datamodel.models.input.connector.LineInput; +import edu.ie3.datamodel.models.input.container.GraphicElements; +import edu.ie3.datamodel.models.input.graphics.LineGraphicInput; +import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput; +import java.util.Optional; +import java.util.Set; + +/** + * Interface that provides the capability to build entities of type {@link + * edu.ie3.datamodel.models.input.graphics.GraphicInput} from different data sources e.g. .csv files + * or databases + * + * @version 0.1 + * @since 08.04.20 + */ +public interface GraphicSource extends DataSource { + + /** + * Should return either a consistent instance of {@link GraphicElements} wrapped in {@link + * Optional} or an empty {@link Optional}. The decision to use {@link Optional} instead of + * returning the {@link GraphicElements} instance directly is motivated by the fact, that a {@link + * GraphicElements} is a container instance that depends on several other entities. Without being + * complete, it is useless for further processing. Hence, whenever at least one entity {@link + * GraphicElements} depends on cannot be provided, {@link Optional#empty()} should be returned and + * extensive logging should provide enough information to debug the error and fix the persistent + * data that has been failed to processed. + * + *
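A minimal consumer sketch for this Optional-based contract follows; it is not part of the patch. The GraphicSource implementation handed in as graphicSource, the helper class and the exception message are assumptions for illustration only.

import edu.ie3.datamodel.io.source.GraphicSource;
import edu.ie3.datamodel.models.input.container.GraphicElements;
import java.util.Optional;

public class GraphicElementsSketch {
  // Hypothetical caller: either a complete, valid container is returned or nothing at all.
  public static GraphicElements readGraphics(GraphicSource graphicSource) {
    Optional<GraphicElements> maybeGraphics = graphicSource.getGraphicElements();
    // An empty Optional signals that at least one required entity could not be built;
    // the source implementation is expected to log the details.
    return maybeGraphics.orElseThrow(
        () -> new IllegalStateException("GraphicElements could not be built completely."));
  }
}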

Furthermore, it is expected, that the specific implementation of this method ensures not + * only the completeness of the resulting {@link GraphicElements} instance, but also its validity + * e.g. in the sense that not duplicate UUIDs exist within all entities contained in the returning + * instance. + * + * @return either a valid, complete {@link GraphicElements} optional or {@link Optional#empty()} + */ + Optional getGraphicElements(); + + /** + * Returns a set of {@link NodeGraphicInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link NodeGraphicInput} which has to be checked manually, as {@link + * NodeGraphicInput#equals(Object)} is NOT restricted on the uuid of {@link NodeGraphicInput}. + * + * @return a set of object and uuid unique {@link NodeGraphicInput} entities + */ + Set getNodeGraphicInput(); + + /** + * Returns a set of {@link NodeGraphicInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link NodeGraphicInput} which has to be checked manually, as {@link + * NodeGraphicInput#equals(Object)} is NOT restricted on the uuid of {@link NodeGraphicInput}. + * + *

In contrast to {@link this#getNodeGraphicInput()} this interface provides the ability to + * pass in an already existing set of {@link NodeInput} entities, the {@link NodeGraphicInput} + * instances depend on. Doing so, already loaded nodes can be recycled to improve performance and + * prevent unnecessary loading operations. + * + *
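A sketch of the recycling idea described above, under the assumption that the nodes have already been read from a RawGridSource; the helper class and both source variables are hypothetical.

import edu.ie3.datamodel.io.source.GraphicSource;
import edu.ie3.datamodel.io.source.RawGridSource;
import edu.ie3.datamodel.models.input.NodeInput;
import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput;
import java.util.Set;

public class NodeGraphicSketch {
  // Hypothetical helper: nodes are resolved once and handed over, so the graphic source
  // does not have to load them again.
  public static Set<NodeGraphicInput> readNodeGraphics(
      RawGridSource rawGridSource, GraphicSource graphicSource) {
    Set<NodeInput> nodes = rawGridSource.getNodes();
    return graphicSource.getNodeGraphicInput(nodes);
  }
}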

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param nodes a set of object and uuid unique nodes that should be used for the returning + * instances + * @return a set of object and uuid unique {@link NodeGraphicInput} entities + */ + Set getNodeGraphicInput(Set nodes); + + /** + * Returns a set of {@link LineGraphicInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link LineGraphicInput} which has to be checked manually, as {@link + * LineGraphicInput#equals(Object)} is NOT restricted on the uuid of {@link LineGraphicInput}. + * + * @return a set of object and uuid unique {@link LineGraphicInput} entities + */ + Set getLineGraphicInput(); + + /** + * Returns a set of {@link LineGraphicInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link LineGraphicInput} which has to be checked manually, as {@link + * LineGraphicInput#equals(Object)} is NOT restricted on the uuid of {@link LineGraphicInput}. + * + *

In contrast to {@link this#getLineGraphicInput()} this interface provides the ability to + * pass in an already existing set of {@link LineInput} entities, the {@link LineGraphicInput} + * instances depend on. Doing so, already loaded nodes can be recycled to improve performance and + * prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param lines a set of object and uuid unique lines that should be used for the returning + * instances + * @return a set of object and uuid unique {@link LineGraphicInput} entities + */ + Set getLineGraphicInput(Set lines); +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/RawGridSource.java b/src/main/java/edu/ie3/datamodel/io/source/RawGridSource.java index 84639c10a..7c013d196 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/RawGridSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/RawGridSource.java @@ -5,10 +5,257 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.models.input.MeasurementUnitInput; +import edu.ie3.datamodel.models.input.NodeInput; +import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.models.input.connector.LineInput; +import edu.ie3.datamodel.models.input.connector.SwitchInput; +import edu.ie3.datamodel.models.input.connector.Transformer2WInput; +import edu.ie3.datamodel.models.input.connector.Transformer3WInput; +import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; +import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; +import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; import edu.ie3.datamodel.models.input.container.RawGridElements; +import java.util.Optional; +import java.util.Set; -/** Describes a data source for raw grid data */ +/** + * Interface that provides the capability to build entities that are hold by a {@link + * RawGridElements} as well as the {@link RawGridElements} container as well from different data + * sources e.g. .csv files or databases. + * + * @version 0.1 + * @since 08.04.20 + */ public interface RawGridSource extends DataSource { - /** @return grid data as an aggregation of its elements */ - RawGridElements getGridData(); + /** + * Should return either a consistent instance of {@link RawGridElements} wrapped in {@link + * Optional} or an empty {@link Optional}. The decision to use {@link Optional} instead of + * returning the {@link RawGridElements} instance directly is motivated by the fact, that a {@link + * RawGridElements} is a container instance that depends on several other entities. Without being + * complete, it is useless for further processing. + * + *

Hence, whenever at least one entity that {@link RawGridElements} depends on cannot be provided, + * {@link Optional#empty()} should be returned and extensive logging should provide enough + * information to debug the error and fix the persistent data that failed to be processed. + * + *


Furthermore, it is expected that the specific implementation of this method ensures not + * only the completeness of the resulting {@link RawGridElements} instance, but also its validity + * e.g. in the sense that no duplicate UUIDs exist within all entities contained in the returning + * instance. + * + * @return either a valid, complete {@link RawGridElements} optional or {@link Optional#empty()} + */ + Optional<RawGridElements> getGridData(); + + /** + * Returns a unique set of {@link NodeInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link NodeInput} which has to be checked manually, + * as {@link NodeInput#equals(Object)} is NOT restricted on the uuid of {@link NodeInput}. + * + * @return a set of object and uuid unique {@link NodeInput} entities + */ + Set getNodes(); + + /** + * Returns a set of {@link NodeInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * NodeInput} which has to be checked manually, as {@link NodeInput#equals(Object)} is NOT + * restricted on the uuid of {@link NodeInput}. + * + *
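Because equals() of these entities is not restricted to the uuid, a caller might verify uuid uniqueness explicitly. A generic sketch follows; it is not part of the patch, the helper name is hypothetical, and it assumes that UniqueEntity#getUuid() returns a java.util.UUID.

import edu.ie3.datamodel.models.UniqueEntity;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;

public class UuidUniquenessSketch {
  // Hypothetical check: object uniqueness is already guaranteed by the Set,
  // uuid uniqueness has to be verified on top of that.
  public static <E extends UniqueEntity> boolean isUuidUnique(Set<E> entities) {
    Set<UUID> uuids = entities.stream().map(UniqueEntity::getUuid).collect(Collectors.toSet());
    return uuids.size() == entities.size();
  }
}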

In contrast to {@link this#getNodes()} this interface provides the ability to pass in an + * already existing set of {@link OperatorInput} entities, the {@link NodeInput} instances depend + * on. Doing so, already loaded nodes can be recycled to improve performance and prevent + * unnecessary loading operations. + * + *

If something fails during the creation process, it is up to the concrete implementation whether + * an empty set or a set with all entities that could be built is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @return a set of object and uuid unique {@link NodeInput} entities + */ + Set<NodeInput> getNodes(Set<OperatorInput> operators); + + /** + * Returns a unique set of {@link LineInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link LineInput} which has to be checked manually, + * as {@link LineInput#equals(Object)} is NOT restricted on the uuid of {@link LineInput}. + * + * @return a set of object and uuid unique {@link LineInput} entities + */ + Set getLines(); + + /** + * Returns a set of {@link LineInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * LineInput} which has to be checked manually, as {@link LineInput#equals(Object)} is NOT + * restricted on the uuid of {@link LineInput}. + * + *

In contrast to {@link this#getLines()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput}, {@link LineTypeInput} and {@link OperatorInput} + * entities, the {@link LineInput} instances depend on. Doing so, already loaded nodes, line types + * and operators can be recycled to improve performance and prevent unnecessary loading + * operations. + * + *
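A sketch of the intended loading order, assuming that operators, nodes and line types have already been obtained elsewhere; the helper class and all parameter names are hypothetical and the snippet is not part of the patch.

import edu.ie3.datamodel.io.source.RawGridSource;
import edu.ie3.datamodel.models.input.NodeInput;
import edu.ie3.datamodel.models.input.OperatorInput;
import edu.ie3.datamodel.models.input.connector.LineInput;
import edu.ie3.datamodel.models.input.connector.type.LineTypeInput;
import java.util.Set;

public class LineLoadingSketch {
  // Hypothetical helper: operators, nodes and line types are assumed to be loaded already
  // (e.g. by other sources) and are recycled here instead of being resolved again.
  public static Set<LineInput> readLines(
      RawGridSource rawGridSource,
      Set<OperatorInput> operators,
      Set<NodeInput> nodes,
      Set<LineTypeInput> lineTypes) {
    return rawGridSource.getLines(nodes, lineTypes, operators);
  }
}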

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param lineTypeInputs a set of object and uuid unique {@link LineTypeInput} entities + * @return a set of object and uuid unique {@link LineInput} entities + */ + Set getLines( + Set nodes, Set lineTypeInputs, Set operators); + + /** + * Returns a unique set of {@link Transformer2WInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link Transformer2WInput} which has to be checked + * manually, as {@link Transformer2WInput#equals(Object)} is NOT restricted on the uuid of {@link + * Transformer2WInput}. + * + * @return a set of object and uuid unique {@link Transformer2WInput} entities + */ + Set get2WTransformers(); + + /** + * Returns a set of {@link Transformer2WInput} instances. This set has to be unique in the sense + * of object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link Transformer2WInput} which has to be checked manually, as {@link + * Transformer2WInput#equals(Object)} is NOT restricted on the uuid of {@link Transformer2WInput}. + * + *

In contrast to {@link this#get2WTransformers()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput}, {@link Transformer2WTypeInput} and {@link + * OperatorInput} entities, the {@link Transformer2WInput} instances depend on. Doing so, already + * loaded nodes, transformer types and operators can be recycled to improve performance and prevent + * unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param transformer2WTypes a set of object and uuid unique {@link Transformer2WTypeInput} + * entities + * @return a set of object and uuid unique {@link Transformer2WInput} entities + */ + Set get2WTransformers( + Set nodes, + Set transformer2WTypes, + Set operators); + + /** + * Returns a unique set of {@link Transformer3WInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link Transformer3WInput} which has to be checked + * manually, as {@link Transformer3WInput#equals(Object)} is NOT restricted on the uuid of {@link + * Transformer3WInput}. + * + * @return a set of object and uuid unique {@link Transformer3WInput} entities + */ + Set get3WTransformers(); + + /** + * Returns a set of {@link Transformer3WInput} instances. This set has to be unique in the sense + * of object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link Transformer3WInput} which has to be checked manually, as {@link + * Transformer3WInput#equals(Object)} is NOT restricted on the uuid of {@link Transformer3WInput}. + * + *

In contrast to {@link this#get3WTransformers()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput}, {@link Transformer3WTypeInput} and {@link + * OperatorInput} entities, the {@link Transformer3WInput} instances depend on. Doing so, already + * loaded nodes, transformer types and operators can be recycled to improve performance and prevent + * unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param transformer3WTypeInputs a set of object and uuid unique {@link Transformer3WTypeInput} + * entities + * @return a set of object and uuid unique {@link Transformer3WInput} entities + */ + Set get3WTransformers( + Set nodes, + Set transformer3WTypeInputs, + Set operators); + + /** + * Returns a unique set of {@link SwitchInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link SwitchInput} which has to be checked + * manually, as {@link SwitchInput#equals(Object)} is NOT restricted on the uuid of {@link + * SwitchInput}. + * + * @return a set of object and uuid unique {@link SwitchInput} entities + */ + Set getSwitches(); + + /** + * Returns a set of {@link SwitchInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link SwitchInput} which has to be checked manually, as {@link SwitchInput#equals(Object)} is + * NOT restricted on the uuid of {@link SwitchInput}. + * + *

In contrast to {@link this#getSwitches()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link + * SwitchInput} instances depend on. Doing so, already loaded nodes and operators can + * be recycled to improve performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @return a set of object and uuid unique {@link SwitchInput} entities + */ + Set getSwitches(Set nodes, Set operators); + + /** + * Returns a unique set of {@link MeasurementUnitInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link MeasurementUnitInput} which has to be checked + * manually, as {@link MeasurementUnitInput#equals(Object)} is NOT restricted on the uuid of + * {@link MeasurementUnitInput}. + * + * @return a set of object and uuid unique {@link MeasurementUnitInput} entities + */ + Set getMeasurementUnits(); + + /** + * Returns a set of {@link MeasurementUnitInput} instances. This set has to be unique in the sense + * of object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link MeasurementUnitInput} which has to be checked manually, as {@link + * MeasurementUnitInput#equals(Object)} is NOT restricted on the uuid of {@link + * MeasurementUnitInput}. + * + *

In contrast to {@link this#getMeasurementUnits()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link + * MeasurementUnitInput} instances depend on. Doing so, already loaded nodes and + * operators can be recycled to improve performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @return a set of object and uuid unique {@link MeasurementUnitInput} entities + */ + Set getMeasurementUnits(Set nodes, Set operators); } diff --git a/src/main/java/edu/ie3/datamodel/io/source/SystemParticipantSource.java b/src/main/java/edu/ie3/datamodel/io/source/SystemParticipantSource.java index a84547ae9..6d132b8ca 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/SystemParticipantSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/SystemParticipantSource.java @@ -5,11 +5,388 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.models.input.EvcsInput; +import edu.ie3.datamodel.models.input.NodeInput; +import edu.ie3.datamodel.models.input.OperatorInput; import edu.ie3.datamodel.models.input.container.SystemParticipants; +import edu.ie3.datamodel.models.input.system.*; +import edu.ie3.datamodel.models.input.system.type.*; +import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; +import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput; +import java.util.Optional; +import java.util.Set; -/** Describes a data source for system participants */ +/** + * Interface that provides the capability to build entities of type {@link SystemParticipantInput} + * as well as {@link SystemParticipants} container from .csv files. + * + * @version 0.1 + * @since 08.04.20 + */ public interface SystemParticipantSource extends DataSource { - /** @return system participant data as an aggregation of all elements in this grid */ - SystemParticipants fetchSystemParticipants(); + /** + * Should return either a consistent instance of {@link SystemParticipants} wrapped in {@link + * Optional} or an empty {@link Optional}. The decision to use {@link Optional} instead of + * returning the {@link SystemParticipants} instance directly is motivated by the fact, that a + * {@link SystemParticipants} is a container instance that depends on several other entities. + * Without being complete, it is useless for further processing. + * + *

Hence, whenever at least one entity that {@link SystemParticipants} depends on cannot be + * provided, {@link Optional#empty()} should be returned and extensive logging should provide + * enough information to debug the error and fix the persistent data that failed to be + * processed. + * + *

Furthermore, it is expected that the specific implementation of this method ensures not + * only the completeness of the resulting {@link SystemParticipants} instance, but also its + * validity e.g. in the sense that no duplicate UUIDs exist within all entities contained in the + * returning instance. + * + * @return either a valid, complete {@link SystemParticipants} optional or {@link + * Optional#empty()} + */ + Optional<SystemParticipants> getSystemParticipants(); + + /** + * Returns a unique set of {@link FixedFeedInInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link FixedFeedInInput} which has to be checked manually, + * as {@link FixedFeedInInput#equals(Object)} is NOT restricted on the uuid of {@link FixedFeedInInput}. + * + * @return a set of object and uuid unique {@link FixedFeedInInput} entities + */ + Set getFixedFeedIns(); + + /** + * Returns a set of {@link FixedFeedInInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link FixedFeedInInput} which has to be checked manually, as {@link + * FixedFeedInInput#equals(Object)} is NOT restricted on the uuid of {@link FixedFeedInInput}. + * + *

In contrast to {@link this#getFixedFeedIns()} this interface provides the ability to + * pass in an already existing set of {@link NodeInput} and {@link OperatorInput} entities, the + * {@link FixedFeedInInput} instances depend on. Doing so, already loaded nodes can be recycled to + * improve performance and prevent unnecessary loading operations. + * + *
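A sketch of recycling the same node and operator sets across several participant getters follows; it is not part of the patch and the helper class and variable names are hypothetical.

import edu.ie3.datamodel.io.source.SystemParticipantSource;
import edu.ie3.datamodel.models.input.NodeInput;
import edu.ie3.datamodel.models.input.OperatorInput;
import edu.ie3.datamodel.models.input.system.SystemParticipantInput;
import java.util.HashSet;
import java.util.Set;

public class ParticipantLoadingSketch {
  // Hypothetical helper: the same node and operator sets are handed to several getters,
  // so they only have to be resolved once.
  public static Set<SystemParticipantInput> readFixedFeedInsAndLoads(
      SystemParticipantSource source, Set<NodeInput> nodes, Set<OperatorInput> operators) {
    Set<SystemParticipantInput> participants = new HashSet<>();
    participants.addAll(source.getFixedFeedIns(nodes, operators));
    participants.addAll(source.getLoads(nodes, operators));
    return participants;
  }
}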

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @return a set of object and uuid unique {@link FixedFeedInInput} entities + */ + Set getFixedFeedIns(Set nodes, Set operators); + + /** + * Returns a unique set of {@link PvInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link PvInput} which has to be checked manually, + * as {@link PvInput#equals(Object)} is NOT restricted on the uuid of {@link PvInput}. + * + * @return a set of object and uuid unique {@link PvInput} entities + */ + Set getPvPlants(); + + /** + * Returns a set of {@link PvInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * PvInput} which has to be checked manually, as {@link PvInput#equals(Object)} is NOT restricted + * on the uuid of {@link PvInput}. + * + *

In contrast to {@link this#getPvPlants()} this interface provides the ability to + * pass in an already existing set of {@link NodeInput} and {@link OperatorInput} entities, the + * {@link PvInput} instances depend on. Doing so, already loaded nodes can be recycled to improve + * performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @return a set of object and uuid unique {@link PvInput} entities + */ + Set getPvPlants(Set nodes, Set operators); + + /** + * Returns a unique set of {@link LoadInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link LoadInput} which has to be checked manually, + * as {@link LoadInput#equals(Object)} is NOT restricted on the uuid of {@link LoadInput}. + * + * @return a set of object and uuid unique {@link LoadInput} entities + */ + Set getLoads(); + + /** + * Returns a set of {@link LoadInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * LoadInput} which has to be checked manually, as {@link LoadInput#equals(Object)} is NOT + * restricted on the uuid of {@link LoadInput}. + * + *

In contrast to {@link this#getLoads()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link + * LoadInput} instances depend on. Doing so, already loaded nodes can be recycled to improve + * performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @return a set of object and uuid unique {@link LoadInput} entities + */ + Set getLoads(Set nodes, Set operators); + + /** + * Returns a unique set of {@link EvcsInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link EvcsInput} which has to be checked manually, + * as {@link EvcsInput#equals(Object)} is NOT restricted on the uuid of {@link EvcsInput}. + * + * @return a set of object and uuid unique {@link EvcsInput} entities + */ + Set getEvCS(); + + /** + * Returns a set of {@link EvcsInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * EvcsInput} which has to be checked manually, as {@link EvcsInput#equals(Object)} is NOT + * restricted on the uuid of {@link EvcsInput}. + * + *

In contrast to {@link this#getEvCS()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput} and {@link OperatorInput} entities, the {@link + * EvcsInput} instances depend on. Doing so, already loaded nodes can be recycled to improve + * performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @return a set of object and uuid unique {@link EvcsInput} entities + */ + Set getEvCS(Set nodes, Set operators); + + /** + * Returns a unique set of {@link BmInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link BmInput} which has to be checked manually, + * as {@link BmInput#equals(Object)} is NOT restricted on the uuid of {@link BmInput}. + * + * @return a set of object and uuid unique {@link BmInput} entities + */ + Set getBmPlants(); + + /** + * Returns a set of {@link BmInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * BmInput} which has to be checked manually, as {@link BmInput#equals(Object)} is NOT restricted + * on the uuid of {@link BmInput}. + * + *

In contrast to {@link this#getBmPlants()} this interface provides the ability to pass in + * an already existing set of {@link NodeInput}, {@link BmTypeInput} and {@link OperatorInput} + * entities, the {@link BmInput} instances depend on. Doing so, already loaded nodes can be + * recycled to improve performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param types a set of object and uuid unique {@link BmTypeInput} entities + * @return a set of object and uuid unique {@link BmInput} entities + */ + Set getBmPlants( + Set nodes, Set operators, Set types); + + /** + * Returns a unique set of {@link StorageInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link StorageInput} which has to be checked manually, + * as {@link StorageInput#equals(Object)} is NOT restricted on the uuid of {@link StorageInput}. + * + * @return a set of object and uuid unique {@link StorageInput} entities + */ + Set getStorages(); + + /** + * Returns a set of {@link StorageInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link StorageInput} which has to be checked manually, as {@link StorageInput#equals(Object)} + * is NOT restricted on the uuid of {@link StorageInput}. + * + *

In contrast to {@link this#getStorages()} this interface provides the ability to pass in + * an already existing set of {@link NodeInput}, {@link StorageTypeInput} and {@link + * OperatorInput} entities, the {@link StorageInput} instances depend on. Doing so, already loaded + * nodes can be recycled to improve performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param types a set of object and uuid unique {@link StorageTypeInput} entities + * @return a set of object and uuid unique {@link StorageInput} entities + */ + Set getStorages( + Set nodes, Set operators, Set types); + + /** + * Returns a unique set of {@link WecInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link WecInput} which has to be checked manually, + * as {@link WecInput#equals(Object)} is NOT restricted on the uuid of {@link WecInput}. + * + * @return a set of object and uuid unique {@link WecInput} entities + */ + Set getWecPlants(); + + /** + * Returns a set of {@link WecInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * WecInput} which has to be checked manually, as {@link WecInput#equals(Object)} is NOT + * restricted on the uuid of {@link WecInput}. + * + *

In contrast to {@link this#getWecPlants()} this interface provides the ability to pass + * in an already existing set of {@link NodeInput}, {@link WecTypeInput} and {@link OperatorInput} + * entities, the {@link WecInput} instances depend on. Doing so, already loaded nodes can be + * recycled to improve performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param types a set of object and uuid unique {@link WecTypeInput} entities + * @return a set of object and uuid unique {@link WecInput} entities + */ + Set getWecPlants( + Set nodes, Set operators, Set types); + + /** + * Returns a unique set of {@link EvInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link EvInput} which has to be checked manually, + * as {@link EvInput#equals(Object)} is NOT restricted on the uuid of {@link EvInput}. + * + * @return a set of object and uuid unique {@link EvInput} entities + */ + Set getEvs(); + + /** + * Returns a set of {@link EvInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * EvInput} which has to be checked manually, as {@link EvInput#equals(Object)} is NOT restricted + * on the uuid of {@link EvInput}. + * + *

In contrast to {@link this#getEvs()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput}, {@link EvTypeInput} and {@link OperatorInput} + * entities, the {@link EvInput} instances depend on. Doing so, already loaded nodes can be + * recycled to improve performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param types a set of object and uuid unique {@link EvTypeInput} entities + * @return a set of object and uuid unique {@link EvInput} entities + */ + Set getEvs(Set nodes, Set operators, Set types); + + /** + * Returns a unique set of {@link ChpInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link ChpInput} which has to be checked manually, + * as {@link ChpInput#equals(Object)} is NOT restricted on the uuid of {@link ChpInput}. + * + * @return a set of object and uuid unique {@link ChpInput} entities + */ + Set getChpPlants(); + + /** + * Returns a set of {@link ChpInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * ChpInput} which has to be checked manually, as {@link ChpInput#equals(Object)} is NOT + * restricted on the uuid of {@link ChpInput}. + * + *

In contrast to {@link this#getChpPlants()} this interface provides the ability to pass in an + * already existing set of {@link NodeInput}, {@link ChpTypeInput}, {@link ThermalBusInput}, + * {@link ThermalStorageInput} and {@link OperatorInput} entities, the {@link ChpInput} instances + * depend on. Doing so, already loaded nodes can be recycled to improve performance and prevent + * unnecessary loading operations. + * + *
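A sketch combining a ThermalSource with this method, assuming the electrical prerequisites (nodes, operators, CHP types) are already loaded; the helper class and all parameter names are hypothetical and the snippet is not part of the patch.

import edu.ie3.datamodel.io.source.SystemParticipantSource;
import edu.ie3.datamodel.io.source.ThermalSource;
import edu.ie3.datamodel.models.input.NodeInput;
import edu.ie3.datamodel.models.input.OperatorInput;
import edu.ie3.datamodel.models.input.system.ChpInput;
import edu.ie3.datamodel.models.input.system.type.ChpTypeInput;
import edu.ie3.datamodel.models.input.thermal.ThermalBusInput;
import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput;
import java.util.Set;

public class ChpLoadingSketch {
  // Hypothetical helper: thermal prerequisites are fetched from a ThermalSource and recycled,
  // electrical prerequisites are assumed to be loaded already.
  public static Set<ChpInput> readChpPlants(
      SystemParticipantSource participantSource,
      ThermalSource thermalSource,
      Set<NodeInput> nodes,
      Set<OperatorInput> operators,
      Set<ChpTypeInput> types) {
    Set<ThermalBusInput> thermalBuses = thermalSource.getThermalBuses(operators);
    Set<ThermalStorageInput> thermalStorages =
        thermalSource.getThermalStorages(operators, thermalBuses);
    return participantSource.getChpPlants(nodes, operators, types, thermalBuses, thermalStorages);
  }
}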

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param types a set of object and uuid unique {@link ChpTypeInput} entities + * @param thermalBuses a set of object and uuid unique {@link ThermalBusInput} entities + * @param thermalStorages a set of object and uuid unique {@link ThermalStorageInput} entities + * @return a set of object and uuid unique {@link ChpInput} entities + */ + Set getChpPlants( + Set nodes, + Set operators, + Set types, + Set thermalBuses, + Set thermalStorages); + + /** + * Returns a unique set of {@link HpInput instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link HpInput} which has to be checked manually, + * as {@link HpInput#equals(Object)} is NOT restricted on the uuid of {@link HpInput}. + * + * @return a set of object and uuid unique {@link HpInput} entities + */ + Set getHeatPumps(); + + /** + * Returns a set of {@link HpInput} instances. This set has to be unique in the sense of object + * uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided {@link + * HpInput} which has to be checked manually, as {@link HpInput#equals(Object)} is NOT restricted + * on the uuid of {@link HpInput}. + * + *

In contrast to {@link this#getHeatPumps()} this interface provides the ability to pass + * in an already existing set of {@link NodeInput}, {@link HpTypeInput}, {@link ThermalBusInput}, + * {@link ThermalStorageInput} and {@link OperatorInput} entities, the {@link HpInput} instances + * depend on. Doing so, already loaded nodes can be recycled to improve performance and prevent + * unnecessary loading operations. + * + *

If something fails during the creation process it's up to the concrete implementation of an + * empty set or a set with all entities that has been able to be build is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param nodes a set of object and uuid unique {@link NodeInput} entities + * @param types a set of object and uuid unique {@link HpTypeInput} entities + * @param thermalBuses a set of object and uuid unique {@link ThermalBusInput} entities + * @return a set of object and uuid unique {@link HpInput} entities + */ + Set getHeatPumps( + Set nodes, + Set operators, + Set types, + Set thermalBuses); } diff --git a/src/main/java/edu/ie3/datamodel/io/source/ThermalSource.java b/src/main/java/edu/ie3/datamodel/io/source/ThermalSource.java new file mode 100644 index 000000000..e735b1ec2 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/ThermalSource.java @@ -0,0 +1,164 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source; + +import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput; +import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; +import edu.ie3.datamodel.models.input.thermal.ThermalHouseInput; +import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput; +import java.util.Set; + +/** + * Interface that provides the capability to build thermal {@link + * edu.ie3.datamodel.models.input.AssetInput} entities from persistent data e.g. .csv files or + * databases + * + * @version 0.1 + * @since 08.04.20 + */ +public interface ThermalSource extends DataSource { + + /** + * Returns a unique set of {@link ThermalBusInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link ThermalBusInput} which has to be checked + * manually, as {@link ThermalBusInput#equals(Object)} is NOT restricted on the uuid of {@link + * ThermalBusInput}. + * + * @return a set of object and uuid unique {@link ThermalBusInput} entities + */ + Set getThermalBuses(); + + /** + * Returns a set of {@link ThermalBusInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link ThermalBusInput} which has to be checked manually, as {@link + * ThermalBusInput#equals(Object)} is NOT restricted on the uuid of {@link ThermalBusInput}. + * + *

In contrast to {@link this#getThermalBuses()} this interface provides the ability to pass + * in an already existing set of {@link OperatorInput} entities, the {@link ThermalBusInput} + * instances depend on. Doing so, already loaded nodes can be recycled to improve performance and + * prevent unnecessary loading operations. + * + *

If something fails during the creation process, it is up to the concrete implementation whether an + * empty set or a set with all entities that could be built is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @return a set of object and uuid unique {@link ThermalBusInput} entities + */ + Set getThermalBuses(Set operators); + + /** + * Returns a unique set of instances of all entities implementing the {@link ThermalStorageInput} + * abstract class. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link ThermalStorageInput} which has to be checked + * manually, as {@link ThermalStorageInput#equals(Object)} is NOT restricted on the uuid of {@link + * ThermalStorageInput}. + * + * @return a set of object and uuid unique {@link ThermalStorageInput} entities + */ + Set getThermalStorages(); + + /** + * Returns a unique set of instances of all entities implementing the {@link ThermalStorageInput} + * abstract class. This set has to be unique in the sense of object uniqueness but also in the + * sense of {@link java.util.UUID} uniqueness of the provided {@link ThermalStorageInput} which + * has to be checked manually, as {@link ThermalStorageInput#equals(Object)} is NOT restricted on + * the uuid of {@link ThermalStorageInput}. + * + *

In contrast to {@link this#getThermalStorages()} this interface provides the ability to + * pass in an already existing set of {@link OperatorInput} entities, the {@link + * ThermalStorageInput} instances depend on. Doing so, already loaded nodes can be recycled to + * improve performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process, it is up to the concrete implementation whether an + * empty set or a set with all entities that could be built is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param thermalBuses a set of object and uuid unique {@link ThermalBusInput} that should be used + * for the returning instances + * @return a set of object and uuid unique {@link ThermalStorageInput} entities + */ + Set getThermalStorages( + Set operators, Set thermalBuses); + + /** + * Returns a unique set of {@link ThermalHouseInput} instances. + * + *

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link ThermalHouseInput} which has to be checked + * manually, as {@link ThermalHouseInput#equals(Object)} is NOT restricted on the uuid of {@link + * ThermalHouseInput}. + * + * @return a set of object and uuid unique {@link ThermalHouseInput} entities + */ + Set getThermalHouses(); + + /** + * Returns a set of {@link ThermalHouseInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link ThermalHouseInput} which has to be checked manually, as {@link + * ThermalHouseInput#equals(Object)} is NOT restricted on the uuid of {@link ThermalHouseInput}. + * + *

In contrast to {@link this#getThermalHouses()} this interface provides the ability to pass + * in an already existing set of {@link OperatorInput} entities, the {@link ThermalHouseInput} + * instances depend on. Doing so, already loaded nodes can be recycled to improve performance and + * prevent unnecessary loading operations. + * + *

If something fails during the creation process, it is up to the concrete implementation whether an + * empty set or a set with all entities that could be built is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param thermalBuses a set of object and uuid unique {@link ThermalBusInput} that should be used + * for the returning instances + * @return a set of object and uuid unique {@link ThermalHouseInput} entities + */ + Set getThermalHouses( + Set operators, Set thermalBuses); + + /** + * Returns a unique set of {@link CylindricalStorageInput} instances. + * + *
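Taken together, the getters above imply a natural read order for the thermal model: operators first, then the thermal buses, and only afterwards the houses and storages that reference them. A minimal sketch of that order, assuming concrete TypeSource and ThermalSource instances are provided by the caller:

import edu.ie3.datamodel.io.source.ThermalSource;
import edu.ie3.datamodel.io.source.TypeSource;
import edu.ie3.datamodel.models.input.OperatorInput;
import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput;
import edu.ie3.datamodel.models.input.thermal.ThermalBusInput;
import edu.ie3.datamodel.models.input.thermal.ThermalHouseInput;
import java.util.Set;

class ThermalReadSketch {
  static void readThermalModel(TypeSource typeSource, ThermalSource thermalSource) {
    Set<OperatorInput> operators = typeSource.getOperators();
    // thermal buses only depend on operators ...
    Set<ThermalBusInput> buses = thermalSource.getThermalBuses(operators);
    // ... while houses and cylindrical storages additionally depend on the thermal buses
    Set<ThermalHouseInput> houses = thermalSource.getThermalHouses(operators, buses);
    Set<CylindricalStorageInput> storages = thermalSource.getCylindricStorages(operators, buses);
    System.out.printf("%d buses, %d houses, %d storages%n", buses.size(), houses.size(), storages.size());
  }
}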

This set has to be unique in the sense of object uniqueness but also in the sense of {@link + * java.util.UUID} uniqueness of the provided {@link CylindricalStorageInput} which has to be + * checked manually, as {@link CylindricalStorageInput#equals(Object)} is NOT restricted on the + * uuid of {@link CylindricalStorageInput}. + * + * @return a set of object and uuid unique {@link CylindricalStorageInput} entities + */ + Set getCylindricStorages(); + + /** + * Returns a set of {@link CylindricalStorageInput} instances. This set has to be unique in the + * sense of object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the + * provided {@link CylindricalStorageInput} which has to be checked manually, as {@link + * CylindricalStorageInput#equals(Object)} is NOT restricted on the uuid of {@link + * CylindricalStorageInput}. + * + *

In contrast to {@link this#getCylindricStorages()} this interface provides the ability to + * pass in an already existing set of {@link OperatorInput} entities, the {@link + * CylindricalStorageInput} instances depend on. Doing so, already loaded nodes can be recycled to + * improve performance and prevent unnecessary loading operations. + * + *

If something fails during the creation process, it is up to the concrete implementation whether an + * empty set or a set with all entities that could be built is returned. + * + * @param operators a set of object and uuid unique {@link OperatorInput} that should be used for + * the returning instances + * @param thermalBuses a set of object and uuid unique {@link ThermalBusInput} that should be used + * for the returning instances + * @return a set of object and uuid unique {@link CylindricalStorageInput} entities + */ + Set getCylindricStorages( + Set operators, Set thermalBuses); +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/TypeSource.java b/src/main/java/edu/ie3/datamodel/io/source/TypeSource.java index c40cc4768..4b0620947 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/TypeSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/TypeSource.java @@ -5,6 +5,122 @@ */ package edu.ie3.datamodel.io.source; +import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; +import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; +import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; +import edu.ie3.datamodel.models.input.system.type.*; +import java.util.Set; + +/** + * Interface that provides the capability to build entities of type {@link + * SystemParticipantTypeInput} and {@link OperatorInput} from different data sources e.g. .csv files + * or databases + * + * @version 0.1 + * @since 08.04.20 + */ public interface TypeSource extends DataSource { - // TODO + + /** + * Returns a set of {@link Transformer2WTypeInput} instances. This set has to be unique in the + * sense of object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the + * provided {@link Transformer2WTypeInput} which has to be checked manually, as {@link + * Transformer2WTypeInput#equals(Object)} is NOT restricted on the uuid of {@link + * Transformer2WTypeInput}. + * + * @return a set of object and uuid unique {@link Transformer2WTypeInput} entities + */ + Set getTransformer2WTypes(); + + /** + * Returns a set of {@link OperatorInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link OperatorInput} which has to be checked manually, as {@link OperatorInput#equals(Object)} + * is NOT restricted on the uuid of {@link OperatorInput}. + * + * @return a set of object and uuid unique {@link OperatorInput} entities + */ + Set getOperators(); + + /** + * Returns a set of {@link LineTypeInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link LineTypeInput} which has to be checked manually, as {@link LineTypeInput#equals(Object)} + * is NOT restricted on the uuid of {@link LineTypeInput}. + * + * @return a set of object and uuid unique {@link LineTypeInput} entities + */ + Set getLineTypes(); + + /** + * Returns a set of {@link Transformer3WTypeInput} instances. This set has to be unique in the + * sense of object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the + * provided {@link Transformer3WTypeInput} which has to be checked manually, as {@link + * Transformer3WTypeInput#equals(Object)} is NOT restricted on the uuid of {@link + * Transformer3WTypeInput}.
+ * + * @return a set of object and uuid unique {@link Transformer3WTypeInput} entities + */ + Set getTransformer3WTypes(); + + /** + * Returns a set of {@link BmTypeInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link BmTypeInput} which has to be checked manually, as {@link BmTypeInput#equals(Object)} is + * NOT restricted on the uuid of {@link BmTypeInput}. + * + * @return a set of object and uuid unique {@link BmTypeInput} entities + */ + Set getBmTypes(); + + /** + * Returns a set of {@link ChpTypeInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link ChpTypeInput} which has to be checked manually, as {@link ChpTypeInput#equals(Object)} + * is NOT restricted on the uuid of {@link ChpTypeInput}. + * + * @return a set of object and uuid unique {@link ChpTypeInput} entities + */ + Set getChpTypes(); + + /** + * Returns a set of {@link HpTypeInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link HpTypeInput} which has to be checked manually, as {@link HpTypeInput#equals(Object)} is + * NOT restricted on the uuid of {@link HpTypeInput}. + * + * @return a set of object and uuid unique {@link HpTypeInput} entities + */ + Set getHpTypes(); + + /** + * Returns a set of {@link StorageTypeInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link StorageTypeInput} which has to be checked manually, as {@link + * StorageTypeInput#equals(Object)} is NOT restricted on the uuid of {@link StorageTypeInput}. + * + * @return a set of object and uuid unique {@link StorageTypeInput} entities + */ + Set getStorageTypes(); + + /** + * Returns a set of {@link WecTypeInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link WecTypeInput} which has to be checked manually, as {@link WecTypeInput#equals(Object)} + * is NOT restricted on the uuid of {@link WecTypeInput}. + * + * @return a set of object and uuid unique {@link WecTypeInput} entities + */ + Set getWecTypes(); + + /** + * Returns a set of {@link EvTypeInput} instances. This set has to be unique in the sense of + * object uniqueness but also in the sense of {@link java.util.UUID} uniqueness of the provided + * {@link EvTypeInput} which has to be checked manually, as {@link EvTypeInput#equals(Object)} is + * NOT restricted on the uuid of {@link EvTypeInput}. + * + * @return a set of object and uuid unique {@link EvTypeInput} entities + */ + Set getEvTypes(); } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java new file mode 100644 index 000000000..ddc305c61 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java @@ -0,0 +1,503 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.csv; + +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.io.connectors.CsvFileConnector; +import edu.ie3.datamodel.io.factory.EntityFactory; +import edu.ie3.datamodel.io.factory.input.AssetInputEntityData; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; +import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.AssetInput; +import edu.ie3.datamodel.models.input.AssetTypeInput; +import edu.ie3.datamodel.models.input.NodeInput; +import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.utils.ValidationUtils; +import edu.ie3.util.StringUtils; +import java.io.BufferedReader; +import java.io.IOException; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.LongAdder; +import java.util.function.Predicate; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Parent class of all .csv file related sources containing methods and fields consumed by almost + * all implementations of .csv file related sources. + * + * @version 0.1 + * @since 05.04.20 + */ +public abstract class CsvDataSource { + + protected static final Logger log = LogManager.getLogger(CsvDataSource.class); + + // general fields + private final String csvSep; + protected final CsvFileConnector connector; + + // field names + protected static final String OPERATOR = "operator"; + protected static final String NODE_A = "nodeA"; + protected static final String NODE_B = "nodeB"; + protected static final String NODE = "node"; + protected static final String TYPE = "type"; + protected static final String FIELDS_TO_VALUES_MAP = "fieldsToValuesMap"; + + public CsvDataSource(String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { + this.csvSep = csvSep; + this.connector = new CsvFileConnector(folderPath, fileNamingStrategy); + } + + /** + * Takes a row string of a .csv file and a string array of the csv file headline, tries to split + * the csv row string based on the csv separator and zip it together with the headline. This method does not contain + * any sanity checks. Order of the headline needs to be the same as the fields in the csv row. If + * the zipping fails, an empty map is returned and the causing error is logged.
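For illustration only, the zipping described above boils down to the following standalone snippet (simplified: plain split on the separator, no snake_case to camelCase conversion and no size check of the resulting map):

import java.util.Map;
import java.util.TreeMap;
import java.util.stream.IntStream;

class HeadlineZipSketch {
  public static void main(String[] args) {
    String csvSep = ";";
    String[] headline = {"uuid", "id", "v_rated"};
    String csvRow = "f5d2e9f0-0000-4000-8000-000000000001;node_a;110.0";

    // case-insensitive keys, so "V_RATED" and "v_rated" address the same entry
    Map<String, String> fieldsToAttributes = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    String[] fieldVals = csvRow.split(csvSep, -1);
    IntStream.range(0, fieldVals.length)
        .forEach(i -> fieldsToAttributes.put(headline[i], fieldVals[i].trim()));

    // prints the row as a (fieldName -> fieldValue) map; a size mismatch between headline
    // and row would hint at a wrong separator, the situation the surrounding method reports
    System.out.println(fieldsToAttributes);
  }
}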
+ * + * @param csvRow the csv row string that contains the data + * @param headline the headline fields of the csv file + * @return a map containing the mapping of (fieldName -> fieldValue) or an empty map if an error + * occurred + */ + private Map buildFieldsToAttributes( + final String csvRow, final String[] headline) { + + TreeMap insensitiveFieldsToAttributes = + new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + + final String[] fieldVals = fieldVals(csvSep, csvRow); + + try { + insensitiveFieldsToAttributes.putAll( + IntStream.range(0, fieldVals.length) + .boxed() + .collect( + Collectors.toMap( + k -> StringUtils.snakeCaseToCamelCase(headline[k]), v -> fieldVals[v]))); + + if (insensitiveFieldsToAttributes.size() != headline.length) { + Set fieldsToAttributesKeySet = insensitiveFieldsToAttributes.keySet(); + insensitiveFieldsToAttributes = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + throw new SourceException( + "The size of the headline does not fit to the size of the resulting fields to attributes mapping.\nHeadline: " + + String.join(", ", headline) + + "\nResultingMap: " + + String.join(", ", fieldsToAttributesKeySet) + + "\nCsvRow: " + + csvRow.trim() + + ".\nIs the csv separator in the file matching the separator provided in the constructor ('" + + csvSep + + "') and does the number of columns match the number of headline fields?"); + } + } catch (Exception e) { + log.error( + "Cannot build fields to attributes map for row '{}' with headline '{}'.\nException: {}", + csvRow::trim, + () -> String.join(",", headline), + () -> e); + } + return insensitiveFieldsToAttributes; + } + + /** + * Build an array of from the provided csv row string considering special cases where geoJson or + * {@link edu.ie3.datamodel.models.input.system.characteristic.CharacteristicInput} are provided + * in the csv row string. 
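The special-case handling mentioned above can be shown in isolation: a field that itself contains the separator (a characteristic string in this made-up row) is masked by a placeholder before splitting and the original value is put back afterwards. The regular expression is the one used by the method below; everything else is illustrative:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class PlaceholderSplitSketch {
  public static void main(String[] args) {
    String csvSep = ",";
    // the characteristic field contains the separator itself and would break a naive split
    String csvRow = "4711,cosPhiFixed:{(0.0,0.95)},some_node";
    String charInputRegex = "(cP:|olm:|cosPhiFixed:|cosPhiP:|qV:)\\{.+?\\}";

    // 1) remember the complex fields in their order of appearance
    List<String> charList = new ArrayList<>();
    Matcher matcher = Pattern.compile(charInputRegex).matcher(csvRow);
    while (matcher.find()) {
      charList.add(matcher.group());
    }

    // 2) mask them, split safely on the separator, then put the originals back
    String[] fieldVals = csvRow.replaceAll(charInputRegex, "charRepl").split(csvSep, -1);
    Iterator<String> replacements = charList.iterator();
    for (int i = 0; i < fieldVals.length; i++) {
      if (fieldVals[i].equals("charRepl")) {
        fieldVals[i] = replacements.next();
      }
    }

    // [4711, cosPhiFixed:{(0.0,0.95)}, some_node]
    System.out.println(Arrays.toString(fieldVals));
  }
}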
+ * + * @param csvSep the column separator of the csv row string + * @param csvRow the csv row string + * @return an array with one entry per column of the provided csv row string + */ + private String[] fieldVals(String csvSep, String csvRow) { + + /*geo json support*/ + final String geoJsonRegex = "[\\{].+\\}\\}\\}"; + final String geoReplacement = "geoJSON"; + + /*characteristic input support */ + final String charInputRegex = "(cP:|olm:|cosPhiFixed:|cosPhiP:|qV:)\\{.+?\\}"; + final String charReplacement = "charRepl"; + + List geoList = extractMatchingStrings(geoJsonRegex, csvRow); + List charList = extractMatchingStrings(charInputRegex, csvRow); + + AtomicInteger geoCounter = new AtomicInteger(0); + AtomicInteger charCounter = new AtomicInteger(0); + + return Arrays.stream( + csvRow + .replaceAll(charInputRegex, charReplacement) + .replaceAll(geoJsonRegex, geoReplacement) + .replaceAll("\"", "") + .split(csvSep, -1)) + .map( + fieldVal -> { + String returningFieldVal = fieldVal; + if (fieldVal.equalsIgnoreCase(geoReplacement)) { + returningFieldVal = geoList.get(geoCounter.getAndIncrement()); + } + if (fieldVal.equalsIgnoreCase(charReplacement)) { + returningFieldVal = charList.get(charCounter.getAndIncrement()); + } + return returningFieldVal.trim(); + }) + .toArray(String[]::new); + } + + /** + * Extracts all strings from the provided csvRow matching the provided regexString and returns a + * list of strings in the order of their appearance in the csvRow string + * + * @param regexString regex string that should be searched for + * @param csvRow csv row string that should be searched in for the regex string + * @return a list of strings matching the provided regex in the order of their appearance in the + * provided csv row string + */ + private List extractMatchingStrings(String regexString, String csvRow) { + Pattern pattern = Pattern.compile(regexString); + Matcher matcher = pattern.matcher(csvRow); + + ArrayList matchingList = new ArrayList<>(); + while (matcher.find()) { + matchingList.add(matcher.group()); + } + return matchingList; + } + + /** + * Returns either the first instance of a {@link OperatorInput} in the provided collection of or + * {@link OperatorInput#NO_OPERATOR_ASSIGNED} + * + * @param operators the collections of {@link OperatorInput}s that should be searched in + * @param operatorUuid the operator uuid that is requested + * @return either the first found instancen of {@link OperatorInput} or {@link + * OperatorInput#NO_OPERATOR_ASSIGNED} + */ + private OperatorInput getFirstOrDefaultOperator( + Collection operators, String operatorUuid) { + return findFirstEntityByUuid(operatorUuid, operators) + .orElseGet( + () -> { + log.debug( + "Cannot find operator for node with uuid '{}'. Defaulting to 'NO OPERATOR ASSIGNED'.", + operatorUuid); + return OperatorInput.NO_OPERATOR_ASSIGNED; + }); + } + + /** + * Returns a predicate that can be used to filter optionals of {@link UniqueEntity}s and keep + * track on the number of elements that have been empty optionals. This filter let only pass + * optionals that are non-empty. Example usage: + * Collection.stream().filter(isPresentCollectIfNot(NodeInput.class, new ConcurrentHashMap<>())) + * ... 
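A self-contained, JDK-only variant of that counting filter may make the contract clearer; the generic bounds of the real helper below are simplified here and String stands in for an entity class:

import java.util.List;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class CountingFilterSketch {
  static <T> Predicate<Optional<T>> isPresentCollectIfNot(
      Class<T> entityClass, ConcurrentHashMap<Class<T>, LongAdder> invalidElementsCounterMap) {
    return optional -> {
      if (optional.isPresent()) {
        return true;
      }
      // count the element that could not be built and drop it from the stream
      invalidElementsCounterMap.computeIfAbsent(entityClass, k -> new LongAdder()).increment();
      return false;
    };
  }

  public static void main(String[] args) {
    ConcurrentHashMap<Class<String>, LongAdder> invalid = new ConcurrentHashMap<>();
    List<String> built =
        Stream.of(Optional.of("a"), Optional.<String>empty(), Optional.of("b"))
            .filter(isPresentCollectIfNot(String.class, invalid))
            .map(Optional::get)
            .collect(Collectors.toList());
    System.out.println(built + ", skipped: " + invalid.get(String.class)); // [a, b], skipped: 1
  }
}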
+ * + * @param entityClass entity class that should be used as they key in the provided counter map + * @param invalidElementsCounterMap a map that counts the number of empty optionals and maps it to + * the provided entity clas + * @param the type of the entity + * @return a predicate that can be used to filter and count empty optionals + */ + protected Predicate> isPresentCollectIfNot( + Class entityClass, + ConcurrentHashMap, LongAdder> invalidElementsCounterMap) { + return o -> { + if (o.isPresent()) { + return true; + } else { + invalidElementsCounterMap.computeIfAbsent(entityClass, k -> new LongAdder()).increment(); + return false; + } + }; + } + + protected void printInvalidElementInformation( + Class entityClass, LongAdder noOfInvalidElements) { + + log.error( + "{} entities of type '{}' are missing required elements!", + noOfInvalidElements, + entityClass.getSimpleName()); + } + + protected String saveMapGet(Map map, String key, String mapName) { + return Optional.ofNullable(map.get(key)) + .orElse( + "Key '" + + key + + "' not found" + + (mapName.isEmpty() ? "!" : " in map '" + mapName + "'!")); + } + + protected void logSkippingWarning( + String entityDesc, String entityUuid, String entityId, String missingElementsString) { + + log.warn( + "Skipping '{}' with uuid '{}' and id '{}'. Not all required entities found or map is missing entity key!\nMissing elements:\n{}", + entityDesc, + entityUuid, + entityId, + missingElementsString); + } + + protected Stream filterEmptyOptionals(Stream> elements) { + return elements.filter(Optional::isPresent).map(Optional::get); + } + + /** + * Returns an {@link Optional} of the first {@link UniqueEntity} element of this collection + * matching the provided UUID or an empty {@code Optional} if no matching entity can be found. + * + * @param entityUuid uuid of the entity that should be looked for + * @param entities collection of entities that should be + * @param type of the entity that will be returned, derived from the provided collection + * @return either an optional containing the first entity that has the provided uuid or an empty + * optional if no matching entity with the provided uuid can be found + */ + protected Optional findFirstEntityByUuid( + String entityUuid, Collection entities) { + return entities.stream() + .parallel() + .filter(uniqueEntity -> uniqueEntity.getUuid().toString().equalsIgnoreCase(entityUuid)) + .findFirst(); + } + + /** + * Tries to open a file reader from the connector based on the provided entity class, reads the + * first line (considered to be the headline with headline fields) and returns a stream of + * (fieldName -> fieldValue) mapping where each map represents one row of the .csv file. Since the + * returning stream is a parallel stream, the order of the elements cannot be guaranteed. 
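The reason the rows are collected before a new stream is returned (see the inline comment in the method below) can be reproduced with plain JDK classes: the reader is closed when the method returns, so a detached stream over an already materialised list is handed out instead. A sketch, not the actual implementation:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class DetachedStreamSketch {
  static Stream<String> rows(BufferedReader reader) throws IOException {
    try (BufferedReader r = reader) {
      // materialise first: returning r.lines() directly would hand out a stream whose
      // underlying reader is already closed once this method returns
      List<String> rows = r.lines().skip(1).collect(Collectors.toList()); // skip the headline
      return rows.stream();
    }
  }

  public static void main(String[] args) throws IOException {
    BufferedReader reader = new BufferedReader(new StringReader("uuid;id\n1;a\n2;b"));
    rows(reader).forEach(System.out::println); // prints "1;a" and "2;b"
  }
}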
+ * + * @param entityClass the entity class that should be build and that is used to get the + * corresponding reader + * @param connector the connector that should be used to get the reader from + * @return a parallel stream of maps, where each map represents one row of the csv file with the + * mapping (fieldName -> fieldValue) + */ + protected Stream> buildStreamWithFieldsToAttributesMap( + Class entityClass, CsvFileConnector connector) { + try (BufferedReader reader = connector.initReader(entityClass)) { + String[] headline = reader.readLine().replaceAll("\"", "").split(csvSep); + // by default try-with-resources closes the reader directly when we leave this method (which + // is wanted to avoid a lock on the file), but this causes a closing of the stream as well. + // As we still want to consume the data at other places, we start a new stream instead of + // returning the original one + Collection> allRows = + reader + .lines() + .parallel() + .map(csvRow -> buildFieldsToAttributes(csvRow, headline)) + .filter(map -> !map.isEmpty()) + .collect(Collectors.toList()); + + return distinctRowsWithLog(entityClass, allRows).parallelStream(); + + } catch (IOException e) { + log.warn( + "Cannot read file to build entity '{}': {}", entityClass.getSimpleName(), e.getMessage()); + } + + return Stream.empty(); + } + + /** + * Returns a collection of maps each representing a row in csv file that can be used to built an + * instance of a {@link UniqueEntity}. The uniqueness of each row is doubled checked by a) that no + * duplicated rows are returned that are full (1:1) matches and b) that no rows are returned that + * have the same UUID but different field values. As the later case (b) is destroying the contract + * of UUIDs an empty set is returned to indicate that these data cannot be processed safely and + * the error is logged. For case a), only the duplicates are filtered out an a set with unique + * rows is returned. + * + * @param entityClass the entity class that should be built based on the provided (fieldName -> + * fieldValue) collection + * @param allRows collection of rows of a csv file an entity should be built from + * @param type of the entity + * @return either a set containing only unique rows or an empty set if at least two rows with the + * same UUID but different field values exist + */ + private Set> distinctRowsWithLog( + Class entityClass, Collection> allRows) { + Set> allRowsSet = new HashSet<>(allRows); + // check for duplicated rows that match exactly (full duplicates) -> sanity only, not crucial + if (allRows.size() != allRowsSet.size()) { + log.warn( + "File with '{}' entities contains {} exact duplicated rows. File cleanup is recommended!", + entityClass.getSimpleName(), + (allRows.size() - allRowsSet.size())); + } + + // check for rows that match exactly by their UUID, but have different fields -> crucial, we + // allow only unique UUID entities + Set> distinctUuidRowSet = + allRowsSet + .parallelStream() + .filter(ValidationUtils.distinctByKey(x -> x.get("uuid"))) + .collect(Collectors.toSet()); + if (distinctUuidRowSet.size() != allRowsSet.size()) { + allRowsSet.removeAll(distinctUuidRowSet); + String affectedUuids = + allRowsSet.stream().map(row -> row.get("uuid")).collect(Collectors.joining(",\n")); + log.error( + "'{}' entities with duplicated UUIDs, but different field values found! 
Please review the corresponding input file!\nAffected UUIDs:\n{}", + entityClass.getSimpleName(), + affectedUuids); + // if this happens, we return an empty set to prevent further processing + return new HashSet<>(); + } + + return allRowsSet; + } + + /** + * Checks if the requested type of an asset can be found in the provided collection of types based + * on the provided fields to values mapping. The provided fields to values mapping needs to have + * one and only one field with key {@link this#TYPE} and a corresponding UUID value. If the type + * can be found in the provided collection based on the UUID it is returned wrapped in an + * optional. Otherwise an empty optional is returned and a warning is logged. + * + * @param types a collection of types that should be used for searching + * @param fieldsToAttributes the field name to value mapping incl. the key {@link this#TYPE} + * @param skippedClassString debug string of the class that will be skipping + * @param the type of the resulting type instance + * @return either an optional containing the type or an empty optional if the type cannot be found + */ + protected Optional getAssetType( + Collection types, Map fieldsToAttributes, String skippedClassString) { + + Optional assetType = + Optional.ofNullable(fieldsToAttributes.get(TYPE)) + .flatMap(typeUuid -> findFirstEntityByUuid(typeUuid, types)); + + // if the type is not present we return an empty element and + // log a warning + if (!assetType.isPresent()) { + logSkippingWarning( + skippedClassString, + saveMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), + saveMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), + TYPE + ": " + saveMapGet(fieldsToAttributes, TYPE, FIELDS_TO_VALUES_MAP)); + } + return assetType; + } + + /** + * Returns a stream of optional {@link AssetInputEntityData} that can be used to build instances + * of several subtypes of {@link UniqueEntity} by a corresponding {@link EntityFactory} that + * consumes this data. + * + * @param entityClass the entity class that should be build + * @param operators a collection of {@link OperatorInput} entities that should be used to build + * the data + * @param type of the entity that should be build + * @return stream of optionals of the entity data or empty optionals of the operator required for + * the data cannot be found + */ + protected Stream assetInputEntityDataStream( + Class entityClass, Collection operators) { + return buildStreamWithFieldsToAttributesMap(entityClass, connector) + .map( + fieldsToAttributes -> + assetInputEntityDataStream(entityClass, fieldsToAttributes, operators)); + } + + protected AssetInputEntityData assetInputEntityDataStream( + Class entityClass, + Map fieldsToAttributes, + Collection operators) { + + // get the operator of the entity + String operatorUuid = fieldsToAttributes.get(OPERATOR); + OperatorInput operator = getFirstOrDefaultOperator(operators, operatorUuid); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().removeAll(new HashSet<>(Collections.singletonList(OPERATOR))); + + return new AssetInputEntityData(fieldsToAttributes, entityClass, operator); + } + + /** + * Returns a stream of optional {@link NodeAssetInputEntityData} that can be used to build + * instances of several subtypes of {@link UniqueEntity} by a corresponding {@link EntityFactory} + * that consumes this data. 
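Relating back to the duplicate check in distinctRowsWithLog above, the distinct-by-UUID filtering reduces to the following JDK-only sketch, where distinctByKey is a simplified stand-in for the ValidationUtils helper used there:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class DistinctByUuidSketch {
  // simplified stand-in for ValidationUtils.distinctByKey
  static <T> Predicate<T> distinctByKey(Function<? super T, Object> keyExtractor) {
    Set<Object> seenKeys = ConcurrentHashMap.newKeySet();
    return element -> seenKeys.add(keyExtractor.apply(element));
  }

  public static void main(String[] args) {
    Map<String, String> first = new HashMap<>();
    first.put("uuid", "a1");
    first.put("id", "node_1");
    Map<String, String> second = new HashMap<>();
    second.put("uuid", "a1");
    second.put("id", "node_1_conflicting");

    Set<Map<String, String>> distinct =
        Stream.of(first, second)
            .filter(distinctByKey(row -> row.get("uuid")))
            .collect(Collectors.toSet());

    // both rows share the uuid but differ in other fields, so only one survives the filter;
    // that size difference is exactly what is treated as an error and logged above
    System.out.println(distinct.size()); // 1
  }
}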
param assetInputEntityDataStream + * + * @param assetInputEntityDataStream a stream consisting of {@link AssetInputEntityData} that is + * enriched with {@link NodeInput} data + * @param nodes a collection of {@link NodeInput} entities that should be used to build the data + * @return stream of optionals of the entity data or empty optionals of the node required for the + * data cannot be found + */ + protected Stream> nodeAssetInputEntityDataStream( + Stream assetInputEntityDataStream, Collection nodes) { + + return assetInputEntityDataStream + .parallel() + .map( + assetInputEntityData -> { + + // get the raw data + Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); + + // get the node of the entity + String nodeUuid = fieldsToAttributes.get(NODE); + Optional node = findFirstEntityByUuid(nodeUuid, nodes); + + // if the node is not present we return an empty element and + // log a warning + if (!node.isPresent()) { + logSkippingWarning( + assetInputEntityData.getEntityClass().getSimpleName(), + fieldsToAttributes.get("uuid"), + fieldsToAttributes.get("id"), + NODE + ": " + nodeUuid); + return Optional.empty(); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(NODE); + + return Optional.of( + new NodeAssetInputEntityData( + fieldsToAttributes, + assetInputEntityData.getEntityClass(), + assetInputEntityData.getOperatorInput(), + node.get())); + }); + } + + /** + * Returns a stream of optional entities that can be build by using {@link + * NodeAssetInputEntityData} and their corresponding factory. + * + * @param entityClass the entity class that should be build + * @param factory the factory that should be used for the building process + * @param nodes a collection of {@link NodeInput} entities that should be used to build the + * entities + * @param operators a collection of {@link OperatorInput} entities should be used to build the + * entities + * @param type of the entity that should be build + * @return stream of optionals of the entities that has been built by the factor or empty + * optionals if the entity could not have been build + */ + protected Stream> nodeAssetEntityStream( + Class entityClass, + EntityFactory factory, + Collection nodes, + Collection operators) { + return nodeAssetInputEntityDataStream(assetInputEntityDataStream(entityClass, operators), nodes) + .map(dataOpt -> dataOpt.flatMap(factory::getEntity)); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvGraphicSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvGraphicSource.java new file mode 100644 index 000000000..331fcf336 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvGraphicSource.java @@ -0,0 +1,239 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.csv; + +import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputEntityData; +import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputFactory; +import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputEntityData; +import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputFactory; +import edu.ie3.datamodel.io.source.GraphicSource; +import edu.ie3.datamodel.io.source.RawGridSource; +import edu.ie3.datamodel.io.source.TypeSource; +import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.NodeInput; +import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.models.input.connector.LineInput; +import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; +import edu.ie3.datamodel.models.input.container.GraphicElements; +import edu.ie3.datamodel.models.input.graphics.LineGraphicInput; +import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Implementation of the {@link GraphicSource} interface to read {@link NodeGraphicInput} and {@link + * LineGraphicInput} entities from .csv files + * + * @version 0.1 + * @since 08.04.20 + */ +public class CsvGraphicSource extends CsvDataSource implements GraphicSource { + + // general fields + private final TypeSource typeSource; + private final RawGridSource rawGridSource; + + // factories + private final LineGraphicInputFactory lineGraphicInputFactory; + private final NodeGraphicInputFactory nodeGraphicInputFactory; + + public CsvGraphicSource( + String csvSep, + String folderPath, + FileNamingStrategy fileNamingStrategy, + TypeSource typeSource, + RawGridSource rawGridSource) { + super(csvSep, folderPath, fileNamingStrategy); + this.typeSource = typeSource; + this.rawGridSource = rawGridSource; + + // init factories + this.lineGraphicInputFactory = new LineGraphicInputFactory(); + this.nodeGraphicInputFactory = new NodeGraphicInputFactory(); + } + + /** {@inheritDoc} */ + @Override + public Optional getGraphicElements() { + + // read all needed entities + /// start with types and operators + Set operators = typeSource.getOperators(); + Set lineTypes = typeSource.getLineTypes(); + + Set nodes = rawGridSource.getNodes(operators); + Set lines = rawGridSource.getLines(nodes, lineTypes, operators); + + // start with the entities needed for a GraphicElements entity + /// as we want to return a working grid, keep an eye on empty optionals + ConcurrentHashMap, LongAdder> nonBuildEntities = + new ConcurrentHashMap<>(); + + Set nodeGraphics = + buildNodeGraphicEntityData(nodes) + .map(dataOpt -> dataOpt.flatMap(nodeGraphicInputFactory::getEntity)) + .filter(isPresentCollectIfNot(NodeGraphicInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + + Set lineGraphics = + buildLineGraphicEntityData(lines) + .map(dataOpt -> dataOpt.flatMap(lineGraphicInputFactory::getEntity)) + .filter(isPresentCollectIfNot(LineGraphicInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + + // if we found invalid elements return an empty 
optional and log the problems + if (!nonBuildEntities.isEmpty()) { + nonBuildEntities.forEach(this::printInvalidElementInformation); + return Optional.empty(); + } + + // if everything is fine, return a GraphicElements instance + return Optional.of(new GraphicElements(nodeGraphics, lineGraphics)); + } + /** {@inheritDoc} */ + @Override + public Set getNodeGraphicInput() { + return getNodeGraphicInput(rawGridSource.getNodes(typeSource.getOperators())); + } + + /** + * {@inheritDoc} + * + *
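A rough usage sketch for the all-or-nothing behaviour of getGraphicElements above: the returned Optional is only present if every graphic element could be built. The concrete TypeSource and RawGridSource instances as well as the FileNamingStrategy are assumed to be created elsewhere:

import edu.ie3.datamodel.io.FileNamingStrategy;
import edu.ie3.datamodel.io.source.GraphicSource;
import edu.ie3.datamodel.io.source.RawGridSource;
import edu.ie3.datamodel.io.source.TypeSource;
import edu.ie3.datamodel.io.source.csv.CsvGraphicSource;
import edu.ie3.datamodel.models.input.container.GraphicElements;
import java.util.Optional;

class GraphicReadSketch {
  static Optional<GraphicElements> readGraphics(
      String csvSep,
      String folderPath,
      FileNamingStrategy namingStrategy,
      TypeSource typeSource,
      RawGridSource rawGridSource) {
    GraphicSource graphicSource =
        new CsvGraphicSource(csvSep, folderPath, namingStrategy, typeSource, rawGridSource);
    // empty if at least one node or line graphic could not be built from the .csv data
    return graphicSource.getGraphicElements();
  }
}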

If the set of {@link NodeInput} entities is not exhaustive for all available {@link + * NodeGraphicInput} entities or if an error during the building process occurs, all entities that + * have been able to be built are returned and the not-built ones are ignored (= filtered out). + */ + @Override + public Set getNodeGraphicInput(Set nodes) { + return filterEmptyOptionals( + buildNodeGraphicEntityData(nodes) + .map(dataOpt -> dataOpt.flatMap(nodeGraphicInputFactory::getEntity))) + .collect(Collectors.toSet()); + } + + /** {@inheritDoc} */ + @Override + public Set getLineGraphicInput() { + Set operators = typeSource.getOperators(); + return getLineGraphicInput( + rawGridSource.getLines( + rawGridSource.getNodes(operators), typeSource.getLineTypes(), operators)); + } + + /** + * {@inheritDoc} + * + *

If the set of {@link LineInput} entities is not exhaustive for all available {@link + * LineGraphicInput} entities or if an error during the building process occurs, all entities that + * has been able to be built are returned and the not-built ones are ignored (= filtered out). + */ + @Override + public Set getLineGraphicInput(Set lines) { + + return filterEmptyOptionals( + buildLineGraphicEntityData(lines) + .map(dataOpt -> dataOpt.flatMap(lineGraphicInputFactory::getEntity))) + .collect(Collectors.toSet()); + } + + /** + * Builds a stream of {@link NodeGraphicInputEntityData} instances that can be consumed by a + * {@link NodeGraphicInputFactory} to build instances of {@link NodeGraphicInput} entities. This + * method depends on corresponding instances of {@link NodeInput} entities that are represented by + * a corresponding {@link NodeGraphicInput} entity. The determination of matching {@link + * NodeInput} and {@link NodeGraphicInput} entities is carried out by the UUID of the {@link + * NodeInput} entity. Hence it is crucial to only pass over collections that are pre-checked for + * the uniqueness of the UUIDs of the nodes they contain. No further sanity checks are included in + * this method. If no UUID of a {@link NodeInput} entity can be found for a {@link + * NodeGraphicInputEntityData} instance, an empty optional is included in the stream and warning + * is logged. + * + * @param nodes a set of nodes with unique uuids + * @return a stream of optional {@link NodeGraphicInput} entities + */ + private Stream> buildNodeGraphicEntityData( + Set nodes) { + return buildStreamWithFieldsToAttributesMap(NodeGraphicInput.class, connector) + .map(fieldsToAttributes -> buildNodeGraphicEntityData(fieldsToAttributes, nodes)); + } + + private Optional buildNodeGraphicEntityData( + Map fieldsToAttributes, Set nodes) { + + // get the node of the entity + String nodeUuid = fieldsToAttributes.get(NODE); + Optional node = findFirstEntityByUuid(nodeUuid, nodes); + + // if the node is not present we return an empty element and + // log a warning + if (!node.isPresent()) { + logSkippingWarning( + NodeGraphicInput.class.getSimpleName(), + fieldsToAttributes.get("uuid"), + "no id (graphic entities don't have one)", + NODE + ": " + nodeUuid); + return Optional.empty(); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(NODE); + + return Optional.of(new NodeGraphicInputEntityData(fieldsToAttributes, node.get())); + } + + /** + * Builds a stream of {@link LineGraphicInputEntityData} instances that can be consumed by a + * {@link LineGraphicInputFactory} to build instances of {@link LineGraphicInput} entities. This + * method depends on corresponding instances of {@link LineInput} entities that are represented by + * a corresponding {@link LineGraphicInput} entity. The determination of matching {@link + * LineInput} and {@link LineGraphicInput} entities is carried out by the UUID of the {@link + * LineInput} entity. Hence it is crucial to only pass over collections that are pre-checked for + * the uniqueness of the UUIDs of the nodes they contain. No further sanity checks are included in + * this method. If no UUID of a {@link LineInput} entity can be found for a {@link + * LineGraphicInputEntityData} instance, an empty optional is included in the stream and warning + * is logged. 
+ * + * @param lines a set of lines with unique uuids + * @return a stream of optional {@link LineGraphicInput} entities + */ + private Stream> buildLineGraphicEntityData( + Set lines) { + return buildStreamWithFieldsToAttributesMap(LineGraphicInput.class, connector) + .map(fieldsToAttributes -> buildLineGraphicEntityData(fieldsToAttributes, lines)); + } + + private Optional buildLineGraphicEntityData( + Map fieldsToAttributes, Set lines) { + + // get the node of the entity + String lineUuid = fieldsToAttributes.get("line"); + Optional line = findFirstEntityByUuid(lineUuid, lines); + + // if the node is not present we return an empty element and + // log a warning + if (!line.isPresent()) { + logSkippingWarning( + LineGraphicInput.class.getSimpleName(), + fieldsToAttributes.get("uuid"), + "no id (graphic entities don't have one)", + "line: " + lineUuid); + return Optional.empty(); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove("line"); + + return Optional.of(new LineGraphicInputEntityData(fieldsToAttributes, line.get())); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvRawGridSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvRawGridSource.java new file mode 100644 index 000000000..82fdacd2d --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvRawGridSource.java @@ -0,0 +1,550 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.csv; + +import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.io.factory.EntityFactory; +import edu.ie3.datamodel.io.factory.input.*; +import edu.ie3.datamodel.io.source.RawGridSource; +import edu.ie3.datamodel.io.source.TypeSource; +import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.*; +import edu.ie3.datamodel.models.input.connector.LineInput; +import edu.ie3.datamodel.models.input.connector.SwitchInput; +import edu.ie3.datamodel.models.input.connector.Transformer2WInput; +import edu.ie3.datamodel.models.input.connector.Transformer3WInput; +import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; +import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; +import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; +import edu.ie3.datamodel.models.input.container.RawGridElements; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Source that provides the capability to build entities that are hold by a {@link RawGridElements} + * as well as the {@link RawGridElements} container from .csv files. + * + *

This source is not buffered, which means each call on a getter method always tries to + * read all data that is necessary to return the requested objects in a hierarchical cascading way. + * + *

If performance is an issue, it is recommended to read the data in a cascading way, starting with + * the nodes and then using the getters with arguments to avoid reading the same data multiple times. + * + *
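A sketch of that recommended cascading read, assuming the caller has access to the TypeSource the grid source was constructed with:

import edu.ie3.datamodel.io.source.RawGridSource;
import edu.ie3.datamodel.io.source.TypeSource;
import edu.ie3.datamodel.models.input.NodeInput;
import edu.ie3.datamodel.models.input.OperatorInput;
import edu.ie3.datamodel.models.input.connector.LineInput;
import edu.ie3.datamodel.models.input.connector.Transformer2WInput;
import java.util.Set;

class CascadingGridReadSketch {
  static void readGrid(TypeSource typeSource, RawGridSource rawGridSource) {
    // read operators and nodes only once ...
    Set<OperatorInput> operators = typeSource.getOperators();
    Set<NodeInput> nodes = rawGridSource.getNodes(operators);
    // ... and reuse them for every asset that references them
    Set<LineInput> lines =
        rawGridSource.getLines(nodes, typeSource.getLineTypes(), operators);
    Set<Transformer2WInput> transformers =
        rawGridSource.get2WTransformers(nodes, typeSource.getTransformer2WTypes(), operators);
    System.out.printf("%d lines, %d transformers%n", lines.size(), transformers.size());
  }
}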

The resulting sets are always unique on object and UUID base (with distinct UUIDs). + * + * @version 0.1 + * @since 03.04.20 + */ +public class CsvRawGridSource extends CsvDataSource implements RawGridSource { + + // general fields + private final TypeSource typeSource; + + // factories + private final NodeInputFactory nodeInputFactory; + private final LineInputFactory lineInputFactory; + private final Transformer2WInputFactory transformer2WInputFactory; + private final Transformer3WInputFactory transformer3WInputFactory; + private final SwitchInputFactory switchInputFactory; + private final MeasurementUnitInputFactory measurementUnitInputFactory; + + public CsvRawGridSource( + String csvSep, + String gridFolderPath, + FileNamingStrategy fileNamingStrategy, + TypeSource typeSource) { + super(csvSep, gridFolderPath, fileNamingStrategy); + this.typeSource = typeSource; + + // init factories + this.nodeInputFactory = new NodeInputFactory(); + this.lineInputFactory = new LineInputFactory(); + this.transformer2WInputFactory = new Transformer2WInputFactory(); + this.transformer3WInputFactory = new Transformer3WInputFactory(); + this.switchInputFactory = new SwitchInputFactory(); + this.measurementUnitInputFactory = new MeasurementUnitInputFactory(); + } + + /** {@inheritDoc} */ + @Override + public Optional getGridData() { + + /* read all needed entities start with the types and operators */ + Set operators = typeSource.getOperators(); + Set lineTypes = typeSource.getLineTypes(); + Set transformer2WTypeInputs = typeSource.getTransformer2WTypes(); + Set transformer3WTypeInputs = typeSource.getTransformer3WTypes(); + + /* assets */ + Set nodes = getNodes(operators); + + /* start with the entities needed for a RawGridElement as we want to return a working grid, keep an eye on empty + * optionals which is equal to elements that have been unable to be built e.g. 
due to missing elements they depend + * on + */ + ConcurrentHashMap, LongAdder> nonBuildEntities = + new ConcurrentHashMap<>(); + + Set lineInputs = + typedEntityStream(LineInput.class, lineInputFactory, nodes, operators, lineTypes) + .filter(isPresentCollectIfNot(LineInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set transformer2WInputs = + typedEntityStream( + Transformer2WInput.class, + transformer2WInputFactory, + nodes, + operators, + transformer2WTypeInputs) + .filter(isPresentCollectIfNot(Transformer2WInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set transformer3WInputs = + transformer3WEntityStream(nodes, transformer3WTypeInputs, operators) + .filter(isPresentCollectIfNot(Transformer3WInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set switches = + untypedConnectorInputEntityStream(SwitchInput.class, switchInputFactory, nodes, operators) + .filter(isPresentCollectIfNot(SwitchInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set measurementUnits = + nodeAssetEntityStream( + MeasurementUnitInput.class, measurementUnitInputFactory, nodes, operators) + .filter(isPresentCollectIfNot(MeasurementUnitInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + + /* if we found non-build elements return an empty optional and log the problems */ + if (!nonBuildEntities.isEmpty()) { + nonBuildEntities.forEach(this::printInvalidElementInformation); + return Optional.empty(); + } + + /* build the grid */ + RawGridElements gridElements = + new RawGridElements( + nodes, + lineInputs, + transformer2WInputs, + transformer3WInputs, + switches, + measurementUnits); + + /* return the grid if it is not empty */ + return gridElements.allEntitiesAsList().isEmpty() + ? Optional.empty() + : Optional.of(gridElements); + } + + /** {@inheritDoc} */ + @Override + public Set getNodes() { + return getNodes(typeSource.getOperators()); + } + + /** + * {@inheritDoc} + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getNodes(Set operators) { + return filterEmptyOptionals( + assetInputEntityDataStream(NodeInput.class, operators).map(nodeInputFactory::getEntity)) + .collect(Collectors.toSet()); + } + + /** {@inheritDoc} */ + @Override + public Set getLines() { + Set operators = typeSource.getOperators(); + return getLines(getNodes(operators), typeSource.getLineTypes(), operators); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} or {@link LineTypeInput} entities is not exhaustive + * for all available {@link LineInput} entities (e.g. a {@link NodeInput} or {@link LineTypeInput} + * entity is missing) or if an error during the building process occurs, the entity that misses + * something will be skipped (which can be seen as a filtering functionality) but all entities + * that are able to be built will be returned anyway and the elements that couldn't have been + * built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getLines( + Set nodes, Set lineTypeInputs, Set operators) { + return filterEmptyOptionals( + typedEntityStream(LineInput.class, lineInputFactory, nodes, operators, lineTypeInputs)) + .collect(Collectors.toSet()); + } + + /** {@inheritDoc} */ + @Override + public Set get2WTransformers() { + Set operators = typeSource.getOperators(); + return get2WTransformers(getNodes(operators), typeSource.getTransformer2WTypes(), operators); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} or {@link Transformer2WTypeInput} entities is not + * exhaustive for all available {@link Transformer2WInput} entities (e.g. a {@link NodeInput} or + * {@link Transformer2WTypeInput} entity is missing) or if an error during the building process + * occurs, the entity that misses something will be skipped (which can be seen as a filtering + * functionality) but all entities that are able to be built will be returned anyway and the + * elements that couldn't have been built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set get2WTransformers( + Set nodes, + Set transformer2WTypes, + Set operators) { + return filterEmptyOptionals( + typedEntityStream( + Transformer2WInput.class, + transformer2WInputFactory, + nodes, + operators, + transformer2WTypes)) + .collect(Collectors.toSet()); + } + + /** {@inheritDoc} */ + @Override + public Set get3WTransformers() { + Set operators = typeSource.getOperators(); + return get3WTransformers(getNodes(operators), typeSource.getTransformer3WTypes(), operators); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} or {@link Transformer3WTypeInput} entities is not + * exhaustive for all available {@link Transformer3WInput} entities (e.g. a {@link NodeInput} or + * {@link Transformer3WTypeInput} entity is missing) or if an error during the building process + * occurs, the entity that misses something will be skipped (which can be seen as a filtering + * functionality) but all entities that are able to be built will be returned anyway and the + * elements that couldn't have been built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set get3WTransformers( + Set nodes, + Set transformer3WTypeInputs, + Set operators) { + + return filterEmptyOptionals( + transformer3WEntityStream(nodes, transformer3WTypeInputs, operators)) + .collect(Collectors.toSet()); + } + + private Stream> transformer3WEntityStream( + Set nodes, + Set transformer3WTypeInputs, + Set operators) { + + return buildTransformer3WEntityData( + buildTypedConnectorEntityData( + buildUntypedConnectorInputEntityData( + assetInputEntityDataStream(Transformer3WInput.class, operators), nodes), + transformer3WTypeInputs), + nodes) + .map(dataOpt -> dataOpt.flatMap(transformer3WInputFactory::getEntity)); + } + + /** {@inheritDoc} */ + @Override + public Set getSwitches() { + Set operators = typeSource.getOperators(); + return getSwitches(getNodes(operators), operators); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} entities is not exhaustive for all available {@link + * SwitchInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error during the + * building process occurs, the entity that misses something will be skipped (which can be seen as + * a filtering functionality) but all entities that are able to be built will be returned anyway + * and the elements that couldn't have been built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getSwitches(Set nodes, Set operators) { + + return filterEmptyOptionals( + untypedConnectorInputEntityStream( + SwitchInput.class, switchInputFactory, nodes, operators)) + .collect(Collectors.toSet()); + } + + private Stream> untypedConnectorInputEntityStream( + Class entityClass, + EntityFactory factory, + Set nodes, + Set operators) { + + return buildUntypedConnectorInputEntityData( + assetInputEntityDataStream(entityClass, operators), nodes) + .map(dataOpt -> dataOpt.flatMap(factory::getEntity)); + } + + /** {@inheritDoc} */ + @Override + public Set getMeasurementUnits() { + Set operators = typeSource.getOperators(); + return getMeasurementUnits(getNodes(operators), operators); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} entities is not exhaustive for all available {@link + * MeasurementUnitInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error + * during the building process occurs, the entity that misses something will be skipped (which can + * be seen as a filtering functionality) but all entities that are able to be built will be + * returned anyway and the elements that couldn't have been built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getMeasurementUnits( + Set nodes, Set operators) { + return filterEmptyOptionals( + nodeAssetEntityStream( + MeasurementUnitInput.class, measurementUnitInputFactory, nodes, operators)) + .collect(Collectors.toSet()); + } + + private Stream> typedEntityStream( + Class entityClass, + EntityFactory> factory, + Collection nodes, + Collection operators, + Collection types) { + + return buildTypedConnectorEntityData( + buildUntypedConnectorInputEntityData( + assetInputEntityDataStream(entityClass, operators), nodes), + types) + .map(dataOpt -> dataOpt.flatMap(factory::getEntity)); + } + + /** + * Converts a stream of {@link AssetInputEntityData} in connection with a collection of known + * {@link NodeInput}s to a stream of {@link ConnectorInputEntityData}. + * + * @param assetInputEntityDataStream Input stream of {@link AssetInputEntityData} + * @param nodes A collection of known nodes + * @return A stream on option to matching {@link ConnectorInputEntityData} + */ + private Stream> buildUntypedConnectorInputEntityData( + Stream assetInputEntityDataStream, Collection nodes) { + return assetInputEntityDataStream + .parallel() + .map( + assetInputEntityData -> + buildUntypedConnectorInputEntityData(assetInputEntityData, nodes)); + } + + /** + * Converts a single given {@link AssetInputEntityData} in connection with a collection of known + * {@link NodeInput}s to {@link ConnectorInputEntityData}. If this is not possible, an empty + * option is given back. + * + * @param assetInputEntityData Input entity data to convert + * @param nodes A collection of known nodes + * @return An option to matching {@link ConnectorInputEntityData} + */ + private Optional buildUntypedConnectorInputEntityData( + AssetInputEntityData assetInputEntityData, Collection nodes) { + // get the raw data + Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); + + // get the two connector nodes + String nodeAUuid = fieldsToAttributes.get(NODE_A); + String nodeBUuid = fieldsToAttributes.get(NODE_B); + Optional nodeA = findFirstEntityByUuid(nodeAUuid, nodes); + Optional nodeB = findFirstEntityByUuid(nodeBUuid, nodes); + + // if nodeA or nodeB are not present we return an empty element and log a + // warning + if (!nodeA.isPresent() || !nodeB.isPresent()) { + String debugString = + Stream.of( + new AbstractMap.SimpleEntry<>(nodeA, NODE_A + ": " + nodeAUuid), + new AbstractMap.SimpleEntry<>(nodeB, NODE_B + ": " + nodeBUuid)) + .filter(entry -> !entry.getKey().isPresent()) + .map(AbstractMap.SimpleEntry::getValue) + .collect(Collectors.joining("\n")); + + logSkippingWarning( + assetInputEntityData.getEntityClass().getSimpleName(), + fieldsToAttributes.get("uuid"), + fieldsToAttributes.get("id"), + debugString); + return Optional.empty(); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().removeAll(new HashSet<>(Arrays.asList(NODE_A, NODE_B))); + + return Optional.of( + new ConnectorInputEntityData( + fieldsToAttributes, + assetInputEntityData.getEntityClass(), + assetInputEntityData.getOperatorInput(), + nodeA.get(), + nodeB.get())); + } + + /** + * Enriches the given untyped entity data with the equivalent asset type. 
If this is not possible, + * an empty Optional is returned + * + * @param noTypeConnectorEntityDataStream Stream of untyped entity data + * @param availableTypes Yet available asset types + * @param Type of the asset type + * @return Stream of option to enhanced data + */ + private + Stream>> buildTypedConnectorEntityData( + Stream> noTypeConnectorEntityDataStream, + Collection availableTypes) { + return noTypeConnectorEntityDataStream + .parallel() + .map( + noTypeEntityDataOpt -> + noTypeEntityDataOpt.flatMap( + noTypeEntityData -> findAndAddType(noTypeEntityData, availableTypes))); + } + + /** + * Finds the required asset type and if present, adds it to the untyped entity data + * + * @param untypedEntityData Untyped entity data to enrich + * @param availableTypes Yet available asset types + * @param Type of the asset type + * @return Option to enhanced data + */ + private Optional> findAndAddType( + ConnectorInputEntityData untypedEntityData, Collection availableTypes) { + Optional assetTypeOption = + getAssetType( + availableTypes, + untypedEntityData.getFieldsToValues(), + untypedEntityData.getClass().getSimpleName()); + return assetTypeOption.map(assetType -> addTypeToEntityData(untypedEntityData, assetType)); + } + + /** + * Enriches the given, untyped entity data with the provided asset type + * + * @param untypedEntityData Untyped entity data to enrich + * @param assetType Asset type to add + * @param Type of the asset type + * @return The enriched entity data + */ + private TypedConnectorInputEntityData addTypeToEntityData( + ConnectorInputEntityData untypedEntityData, T assetType) { + Map fieldsToAttributes = untypedEntityData.getFieldsToValues(); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(TYPE); + + // build result object + return new TypedConnectorInputEntityData<>( + fieldsToAttributes, + untypedEntityData.getEntityClass(), + untypedEntityData.getOperatorInput(), + untypedEntityData.getNodeA(), + untypedEntityData.getNodeB(), + assetType); + } + + /** + * Enriches the Stream of options on {@link Transformer3WInputEntityData} with the information of + * the internal node + * + * @param typedConnectorEntityDataStream Stream of already typed input entity data + * @param nodes Yet available nodes + * @return A stream of options on enriched data + */ + private Stream> buildTransformer3WEntityData( + Stream>> + typedConnectorEntityDataStream, + Collection nodes) { + return typedConnectorEntityDataStream + .parallel() + .map( + typedEntityDataOpt -> + typedEntityDataOpt.flatMap(typeEntityData -> addThirdNode(typeEntityData, nodes))); + } + + /** + * Enriches the third node to the already typed entity data of a three winding transformer. If no + * matching node can be found, return an empty Optional. 
+ * + * @param typeEntityData Already typed entity data + * @param nodes Yet available nodes + * @return An option to the enriched data + */ + private Optional addThirdNode( + TypedConnectorInputEntityData typeEntityData, + Collection nodes) { + + // get the raw data + Map fieldsToAttributes = typeEntityData.getFieldsToValues(); + + // get nodeC of the transformer + String nodeCUuid = fieldsToAttributes.get("nodeC"); + Optional nodeC = findFirstEntityByUuid(nodeCUuid, nodes); + + // if nodeC is not present we return an empty element and + // log a warning + if (!nodeC.isPresent()) { + logSkippingWarning( + typeEntityData.getEntityClass().getSimpleName(), + fieldsToAttributes.get("uuid"), + fieldsToAttributes.get("id"), + "nodeC: " + nodeCUuid); + return Optional.empty(); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove("nodeC"); + + return Optional.of( + new Transformer3WInputEntityData( + fieldsToAttributes, + typeEntityData.getEntityClass(), + typeEntityData.getOperatorInput(), + typeEntityData.getNodeA(), + typeEntityData.getNodeB(), + nodeC.get(), + typeEntityData.getType())); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSource.java new file mode 100644 index 000000000..203413112 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSource.java @@ -0,0 +1,724 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.csv; + +import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.io.factory.EntityFactory; +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData; +import edu.ie3.datamodel.io.factory.input.participant.*; +import edu.ie3.datamodel.io.source.RawGridSource; +import edu.ie3.datamodel.io.source.SystemParticipantSource; +import edu.ie3.datamodel.io.source.ThermalSource; +import edu.ie3.datamodel.io.source.TypeSource; +import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.*; +import edu.ie3.datamodel.models.input.container.SystemParticipants; +import edu.ie3.datamodel.models.input.system.*; +import edu.ie3.datamodel.models.input.system.type.*; +import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; +import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang3.NotImplementedException; + +/** + * Source that provides the capability to build entities of type {@link SystemParticipantInput} as + * well as {@link SystemParticipants} container from .csv files. + * + *

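+ * A hedged construction sketch (the csv separator, the folder layout and the concrete {@code
+ * RawGridSource} implementation are illustrative assumptions; {@code new FileNamingStrategy()}
+ * assumes a no-arg constructor):
+ *
+ * <pre>{@code
+ * FileNamingStrategy naming = new FileNamingStrategy();
+ * TypeSource typeSource = new CsvTypeSource(",", "input/global", naming);
+ * ThermalSource thermalSource = new CsvThermalSource(",", "input/thermal", naming, typeSource);
+ * RawGridSource rawGridSource = ...; // e.g. a csv based raw grid source implementation
+ * SystemParticipantSource participantSource =
+ *     new CsvSystemParticipantSource(
+ *         ",", "input/participants", naming, typeSource, thermalSource, rawGridSource);
+ * }</pre>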
This source is not buffered, which means each call on a getter method always tries to read + * all data that is necessary to return the requested objects in a hierarchical cascading way. + * + *

If performance is an issue, it is recommended to read the data in a cascading manner, starting + * with reading the nodes and then using the getters with arguments to avoid reading the same data + * multiple times (see the usage sketch below). + * + *

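+ * A hedged sketch of that cascading pattern (the {@code typeSource}, {@code rawGridSource} and
+ * {@code participantSource} variables are illustrative):
+ *
+ * <pre>{@code
+ * Set<OperatorInput> operators = typeSource.getOperators();
+ * Set<NodeInput> nodes = rawGridSource.getNodes(operators);
+ * // operators and nodes are read once and reused by every getter call
+ * Set<LoadInput> loads = participantSource.getLoads(nodes, operators);
+ * Set<PvInput> pvPlants = participantSource.getPvPlants(nodes, operators);
+ * Set<FixedFeedInInput> fixedFeedIns = participantSource.getFixedFeedIns(nodes, operators);
+ * }</pre>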
The resulting sets are always unique on object and UUID base (with distinct UUIDs). + * + * @version 0.1 + * @since 03.04.20 + */ +public class CsvSystemParticipantSource extends CsvDataSource implements SystemParticipantSource { + + private static final String THERMAL_STORAGE = "thermalstorage"; + private static final String THERMAL_BUS = "thermalbus"; + + // general fields + private final TypeSource typeSource; + private final RawGridSource rawGridSource; + private final ThermalSource thermalSource; + + // factories + private final BmInputFactory bmInputFactory; + private final ChpInputFactory chpInputFactory; + private final EvInputFactory evInputFactory; + private final FixedFeedInInputFactory fixedFeedInInputFactory; + private final HpInputFactory hpInputFactory; + private final LoadInputFactory loadInputFactory; + private final PvInputFactory pvInputFactory; + private final StorageInputFactory storageInputFactory; + private final WecInputFactory wecInputFactory; + + public CsvSystemParticipantSource( + String csvSep, + String participantsFolderPath, + FileNamingStrategy fileNamingStrategy, + TypeSource typeSource, + ThermalSource thermalSource, + RawGridSource rawGridSource) { + super(csvSep, participantsFolderPath, fileNamingStrategy); + this.typeSource = typeSource; + this.rawGridSource = rawGridSource; + this.thermalSource = thermalSource; + + // init factories + this.bmInputFactory = new BmInputFactory(); + this.chpInputFactory = new ChpInputFactory(); + this.evInputFactory = new EvInputFactory(); + this.fixedFeedInInputFactory = new FixedFeedInInputFactory(); + this.hpInputFactory = new HpInputFactory(); + this.loadInputFactory = new LoadInputFactory(); + this.pvInputFactory = new PvInputFactory(); + this.storageInputFactory = new StorageInputFactory(); + this.wecInputFactory = new WecInputFactory(); + } + + /** {@inheritDoc} */ + @Override + public Optional getSystemParticipants() { + + // read all needed entities + /// start with types and operators + Set operators = typeSource.getOperators(); + Set bmTypes = typeSource.getBmTypes(); + Set chpTypes = typeSource.getChpTypes(); + Set evTypes = typeSource.getEvTypes(); + Set hpTypes = typeSource.getHpTypes(); + Set storageTypes = typeSource.getStorageTypes(); + Set wecTypes = typeSource.getWecTypes(); + + /// go on with the thermal assets + Set thermalBuses = thermalSource.getThermalBuses(operators); + Set thermalStorages = + thermalSource.getThermalStorages(operators, thermalBuses); + + /// go on with the nodes + Set nodes = rawGridSource.getNodes(operators); + + // start with the entities needed for SystemParticipants container + /// as we want to return a working grid, keep an eye on empty optionals which is equal to + // elements that + /// have been unable to be built e.g. 
due to missing elements they depend on + ConcurrentHashMap, LongAdder> nonBuildEntities = + new ConcurrentHashMap<>(); + + Set fixedFeedInInputs = + nodeAssetEntityStream(FixedFeedInInput.class, fixedFeedInInputFactory, nodes, operators) + .filter(isPresentCollectIfNot(FixedFeedInInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set pvInputs = + nodeAssetEntityStream(PvInput.class, pvInputFactory, nodes, operators) + .filter(isPresentCollectIfNot(PvInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set loads = + nodeAssetEntityStream(LoadInput.class, loadInputFactory, nodes, operators) + .filter(isPresentCollectIfNot(LoadInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set bmInputs = + typedEntityStream(BmInput.class, bmInputFactory, nodes, operators, bmTypes) + .filter(isPresentCollectIfNot(BmInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set storages = + typedEntityStream(StorageInput.class, storageInputFactory, nodes, operators, storageTypes) + .filter(isPresentCollectIfNot(StorageInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set wecInputs = + typedEntityStream(WecInput.class, wecInputFactory, nodes, operators, wecTypes) + .filter(isPresentCollectIfNot(WecInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set evs = + typedEntityStream(EvInput.class, evInputFactory, nodes, operators, evTypes) + .filter(isPresentCollectIfNot(EvInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set chpInputs = + chpInputStream(nodes, operators, chpTypes, thermalBuses, thermalStorages) + .filter(isPresentCollectIfNot(ChpInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + Set hpInputs = + hpInputStream(nodes, operators, hpTypes, thermalBuses) + .filter(isPresentCollectIfNot(HpInput.class, nonBuildEntities)) + .map(Optional::get) + .collect(Collectors.toSet()); + + // if we found invalid elements return an empty optional and log the problems + if (!nonBuildEntities.isEmpty()) { + nonBuildEntities.forEach(this::printInvalidElementInformation); + return Optional.empty(); + } + + // if everything is fine, return a system participants container + return Optional.of( + new SystemParticipants( + bmInputs, + chpInputs, + Collections.emptySet(), + evs, + fixedFeedInInputs, + hpInputs, + loads, + pvInputs, + storages, + wecInputs)); + } + + /** {@inheritDoc} */ + @Override + public Set getFixedFeedIns() { + Set operators = typeSource.getOperators(); + return getFixedFeedIns(rawGridSource.getNodes(operators), operators); + } + /** + * {@inheritDoc} + * + *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link + * FixedFeedInInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error occurs + * during the building process, the affected entity is skipped (which can be seen as a filtering + * functionality), but all entities that can be built are returned anyway and the elements that + * could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getFixedFeedIns(Set nodes, Set operators) { + return filterEmptyOptionals( + nodeAssetEntityStream( + FixedFeedInInput.class, fixedFeedInInputFactory, nodes, operators)) + .collect(Collectors.toSet()); + } + + /** {@inheritDoc} */ + @Override + public Set getPvPlants() { + Set operators = typeSource.getOperators(); + return getPvPlants(rawGridSource.getNodes(operators), operators); + } + + /** + * {@inheritDoc} + * + *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link PvInput} + * entities (e.g. a {@link NodeInput} entity is missing) or if an error occurs during the building + * process, the affected entity is skipped (which can be seen as a filtering functionality), but + * all entities that can be built are returned anyway and the elements that could not be built are + * logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getPvPlants(Set nodes, Set operators) { + return filterEmptyOptionals( + nodeAssetEntityStream(PvInput.class, pvInputFactory, nodes, operators)) + .collect(Collectors.toSet()); + } + + /** {@inheritDoc} */ + @Override + public Set getLoads() { + Set operators = typeSource.getOperators(); + return getLoads(rawGridSource.getNodes(operators), operators); + } + + /** + * {@inheritDoc} + * + *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link + * LoadInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error occurs during + * the building process, the affected entity is skipped (which can be seen as a filtering + * functionality), but all entities that can be built are returned anyway and the elements that + * could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getLoads(Set nodes, Set operators) { + return filterEmptyOptionals( + nodeAssetEntityStream(LoadInput.class, loadInputFactory, nodes, operators)) + .collect(Collectors.toSet()); + } + /** {@inheritDoc} */ + @Override + public Set getEvCS() { + throw new NotImplementedException("Ev Charging Stations are not implemented yet!"); + } + + /** + * {@inheritDoc} + * + *

If the set of {@link NodeInput} entities is not exhaustive for all available {@link + * EvcsInput} entities (e.g. a {@link NodeInput} entity is missing) or if an error occurs during + * the building process, the affected entity is skipped (which can be seen as a filtering + * functionality), but all entities that can be built are returned anyway and the elements that + * could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getEvCS(Set nodes, Set operators) { + throw new NotImplementedException("Ev Charging Stations are not implemented yet!"); + } + /** {@inheritDoc} */ + @Override + public Set getBmPlants() { + Set operators = typeSource.getOperators(); + return getBmPlants(rawGridSource.getNodes(operators), operators, typeSource.getBmTypes()); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} or {@link BmTypeInput} entities is not exhaustive for + * all available {@link BmInput} entities (e.g. a {@link NodeInput} or {@link BmTypeInput} entity + * is missing) or if an error occurs during the building process, the affected entity is skipped + * (which can be seen as a filtering functionality), but all entities that can be built are + * returned anyway and the elements that could not be built are logged. + * + *

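+ * A hedged usage sketch (the surrounding source variables are illustrative assumptions):
+ *
+ * <pre>{@code
+ * Set<OperatorInput> operators = typeSource.getOperators();
+ * Set<NodeInput> nodes = rawGridSource.getNodes(operators);
+ * Set<BmInput> bmPlants = participantSource.getBmPlants(nodes, operators, typeSource.getBmTypes());
+ * }</pre>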
If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getBmPlants( + Set nodes, Set operators, Set types) { + return filterEmptyOptionals( + typedEntityStream(BmInput.class, bmInputFactory, nodes, operators, types)) + .collect(Collectors.toSet()); + } + /** {@inheritDoc} */ + @Override + public Set getStorages() { + Set operators = typeSource.getOperators(); + return getStorages(rawGridSource.getNodes(operators), operators, typeSource.getStorageTypes()); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} or {@link StorageTypeInput} entities is not exhaustive + * for all available {@link StorageInput} entities (e.g. a {@link NodeInput} or {@link + * StorageTypeInput} entity is missing) or if an error occurs during the building process, the + * affected entity is skipped (which can be seen as a filtering functionality), but all entities + * that can be built are returned anyway and the elements that could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getStorages( + Set nodes, Set operators, Set types) { + return filterEmptyOptionals( + typedEntityStream(StorageInput.class, storageInputFactory, nodes, operators, types)) + .collect(Collectors.toSet()); + } + /** {@inheritDoc} */ + @Override + public Set getWecPlants() { + Set operators = typeSource.getOperators(); + return getWecPlants(rawGridSource.getNodes(operators), operators, typeSource.getWecTypes()); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} or {@link WecTypeInput} entities is not exhaustive for + * all available {@link WecInput} entities (e.g. a {@link NodeInput} or {@link WecTypeInput} + * entity is missing) or if an error occurs during the building process, the affected entity is + * skipped (which can be seen as a filtering functionality), but all entities that can be built + * are returned anyway and the elements that could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getWecPlants( + Set nodes, Set operators, Set types) { + return filterEmptyOptionals( + typedEntityStream(WecInput.class, wecInputFactory, nodes, operators, types)) + .collect(Collectors.toSet()); + } + /** {@inheritDoc} */ + @Override + public Set getEvs() { + Set operators = typeSource.getOperators(); + return getEvs(rawGridSource.getNodes(operators), operators, typeSource.getEvTypes()); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput} or {@link EvTypeInput} entities is not exhaustive for + * all available {@link EvInput} entities (e.g. a {@link NodeInput} or {@link EvTypeInput} entity + * is missing) or if an error occurs during the building process, the affected entity is skipped + * (which can be seen as a filtering functionality), but all entities that can be built are + * returned anyway and the elements that could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getEvs( + Set nodes, Set operators, Set types) { + return filterEmptyOptionals( + typedEntityStream(EvInput.class, evInputFactory, nodes, operators, types)) + .collect(Collectors.toSet()); + } + + /** + * Constructs a stream of {@link SystemParticipantInput} entities wrapped in {@link Optional}s. + * + * @param entityClass the class of the entities that should be built + * @param factory the corresponding factory that is capable of building this entities + * @param nodes the nodes that should be considered for these entities + * @param operators the operators that should be considered for these entities + * @param types the types that should be considered for these entities + * @param the type of the resulting entity + * @param the type of the type model of the resulting entity + * @return a stream of optionals being either empty or holding an instance of a {@link + * SystemParticipantInput} of the requested entity class + */ + private + Stream> typedEntityStream( + Class entityClass, + EntityFactory> factory, + Set nodes, + Set operators, + Set types) { + return buildTypedEntityData( + nodeAssetInputEntityDataStream( + assetInputEntityDataStream(entityClass, operators), nodes), + types) + .map(dataOpt -> dataOpt.flatMap(factory::getEntity)); + } + /** {@inheritDoc} */ + @Override + public Set getChpPlants() { + Set operators = typeSource.getOperators(); + Set thermalBuses = thermalSource.getThermalBuses(operators); + return getChpPlants( + rawGridSource.getNodes(operators), + operators, + typeSource.getChpTypes(), + thermalBuses, + thermalSource.getThermalStorages(operators, thermalBuses)); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput}, {@link ThermalBusInput}, {@link ThermalStorageInput} + * or {@link ChpTypeInput} entities is not exhaustive for all available {@link ChpInput} entities + * (e.g. a {@link NodeInput} or {@link ChpTypeInput} entity is missing) or if an error occurs + * during the building process, the affected entity is skipped (which can be seen as a filtering + * functionality), but all entities that can be built are returned anyway and the elements that + * could not be built are logged. + * + *

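+ * A hedged sketch of how the required sets can be assembled before the call (the source
+ * variables are illustrative assumptions):
+ *
+ * <pre>{@code
+ * Set<OperatorInput> operators = typeSource.getOperators();
+ * Set<NodeInput> nodes = rawGridSource.getNodes(operators);
+ * Set<ChpTypeInput> chpTypes = typeSource.getChpTypes();
+ * Set<ThermalBusInput> thermalBuses = thermalSource.getThermalBuses(operators);
+ * Set<ThermalStorageInput> thermalStorages = thermalSource.getThermalStorages(operators, thermalBuses);
+ * Set<ChpInput> chpPlants =
+ *     participantSource.getChpPlants(nodes, operators, chpTypes, thermalBuses, thermalStorages);
+ * }</pre>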
If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getChpPlants( + Set nodes, + Set operators, + Set types, + Set thermalBuses, + Set thermalStorages) { + + return filterEmptyOptionals( + chpInputStream(nodes, operators, types, thermalBuses, thermalStorages)) + .collect(Collectors.toSet()); + } + + private Stream> chpInputStream( + Set nodes, + Set operators, + Set types, + Set thermalBuses, + Set thermalStorages) { + return buildChpEntityData( + buildTypedEntityData( + nodeAssetInputEntityDataStream( + assetInputEntityDataStream(ChpInput.class, operators), nodes), + types), + thermalStorages, + thermalBuses) + .map(dataOpt -> dataOpt.flatMap(chpInputFactory::getEntity)); + } + /** {@inheritDoc} */ + @Override + public Set getHeatPumps() { + Set operators = typeSource.getOperators(); + return getHeatPumps( + rawGridSource.getNodes(operators), + operators, + typeSource.getHpTypes(), + thermalSource.getThermalBuses()); + } + + /** + * {@inheritDoc} + * + *

If one of the sets of {@link NodeInput}, {@link ThermalBusInput} or {@link HpTypeInput} + * entities is not exhaustive for all available {@link HpInput} entities (e.g. a {@link NodeInput} + * or {@link HpTypeInput} entity is missing) or if an error occurs during the building process, + * the affected entity is skipped (which can be seen as a filtering functionality), but all + * entities that can be built are returned anyway and the elements that could not be built are + * logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getHeatPumps( + Set nodes, + Set operators, + Set types, + Set thermalBuses) { + return filterEmptyOptionals(hpInputStream(nodes, operators, types, thermalBuses)) + .collect(Collectors.toSet()); + } + + private Stream> hpInputStream( + Set nodes, + Set operators, + Set types, + Set thermalBuses) { + return buildHpEntityData( + buildTypedEntityData( + nodeAssetInputEntityDataStream( + assetInputEntityDataStream(HpInput.class, operators), nodes), + types), + thermalBuses) + .map(dataOpt -> dataOpt.flatMap(hpInputFactory::getEntity)); + } + + /** + * Enriches a given stream of {@link NodeAssetInputEntityData} optionals with a type of {@link + * SystemParticipantTypeInput} based on the provided collection of types and the fields to values + * mapping that inside the already provided {@link NodeAssetInputEntityData} instance. + * + * @param nodeAssetEntityDataStream the data stream of {@link NodeAssetInputEntityData} optionals + * @param types the types that should be used for enrichment and to build {@link + * SystemParticipantTypedEntityData} from + * @param the type of the provided entity types as well as the type parameter of the resulting + * {@link SystemParticipantTypedEntityData} + * @return a stream of optional {@link SystemParticipantTypedEntityData} instances or empty + * optionals if the type couldn't be found + */ + private + Stream>> buildTypedEntityData( + Stream> nodeAssetEntityDataStream, + Collection types) { + return nodeAssetEntityDataStream + .parallel() + .map( + nodeAssetInputEntityDataOpt -> + nodeAssetInputEntityDataOpt.flatMap( + nodeAssetInputEntityData -> + buildTypedEntityData(nodeAssetInputEntityData, types))); + } + + private + Optional> buildTypedEntityData( + NodeAssetInputEntityData nodeAssetInputEntityData, Collection types) { + return getAssetType( + types, + nodeAssetInputEntityData.getFieldsToValues(), + nodeAssetInputEntityData.getClass().getSimpleName()) + .map( + // if the optional is present, transform and return to the data, + // otherwise return an empty optional + assetType -> { + Map fieldsToAttributes = nodeAssetInputEntityData.getFieldsToValues(); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(TYPE); + + return new SystemParticipantTypedEntityData<>( + fieldsToAttributes, + nodeAssetInputEntityData.getEntityClass(), + nodeAssetInputEntityData.getOperatorInput(), + nodeAssetInputEntityData.getNode(), + assetType); + }); + } + + /** + * Enriches a given stream of {@link SystemParticipantTypedEntityData} optionals with a type of + * {@link ThermalBusInput} based on the provided collection of buses and the fields to values + * mapping inside the already provided {@link SystemParticipantTypedEntityData} instance. 
+ * + * @param typedEntityDataStream the data stream of {@link SystemParticipantTypedEntityData} + * optionals + * @param thermalBuses the thermal buses that should be used for enrichment and to build {@link + * HpInputEntityData} + * @return stream of optional {@link HpInputEntityData} instances or empty optionals if they + * thermal bus couldn't be found + */ + private Stream> buildHpEntityData( + Stream>> typedEntityDataStream, + Collection thermalBuses) { + + return typedEntityDataStream + .parallel() + .map( + typedEntityDataOpt -> + typedEntityDataOpt.flatMap( + typedEntityData -> buildHpEntityData(typedEntityData, thermalBuses))); + } + + private Optional buildHpEntityData( + SystemParticipantTypedEntityData typedEntityData, + Collection thermalBuses) { + // get the raw data + Map fieldsToAttributes = typedEntityData.getFieldsToValues(); + + // get the thermal bus input for this chp unit and try to built the entity data + Optional hpInputEntityDataOpt = + Optional.ofNullable(fieldsToAttributes.get(THERMAL_BUS)) + .flatMap( + thermalBusUuid -> + thermalBuses.stream() + .filter( + storage -> + storage.getUuid().toString().equalsIgnoreCase(thermalBusUuid)) + .findFirst() + .map( + thermalBus -> { + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().remove(THERMAL_BUS); + + return new HpInputEntityData( + fieldsToAttributes, + typedEntityData.getOperatorInput(), + typedEntityData.getNode(), + typedEntityData.getTypeInput(), + thermalBus); + })); + + // if the requested entity is not present we return an empty element and + // log a warning + if (!hpInputEntityDataOpt.isPresent()) { + logSkippingWarning( + typedEntityData.getEntityClass().getSimpleName(), + saveMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), + saveMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), + "thermalBus: " + saveMapGet(fieldsToAttributes, THERMAL_BUS, FIELDS_TO_VALUES_MAP)); + } + + return hpInputEntityDataOpt; + } + + /** + * Enriches a given stream of {@link SystemParticipantTypedEntityData} optionals with a type of + * {@link ThermalBusInput} and {@link ThermalStorageInput} based on the provided collection of + * buses, storages and the fields to values mapping inside the already provided {@link + * SystemParticipantTypedEntityData} instance. 
+ * + * @param typedEntityDataStream the data stream of {@link SystemParticipantTypedEntityData} + * optionals + * @param thermalStorages the thermal storages that should be used for enrichment and to build + * {@link ChpInputEntityData} + * @param thermalBuses the thermal buses that should be used for enrichment and to build {@link + * ChpInputEntityData} + * @return stream of optional {@link ChpInputEntityData}instances or empty optionals if they + * thermal bus couldn't be found + */ + private Stream> buildChpEntityData( + Stream>> typedEntityDataStream, + Collection thermalStorages, + Collection thermalBuses) { + + return typedEntityDataStream + .parallel() + .map( + typedEntityDataOpt -> + typedEntityDataOpt.flatMap( + typedEntityData -> + buildChpEntityData(typedEntityData, thermalStorages, thermalBuses))); + } + + private Optional buildChpEntityData( + SystemParticipantTypedEntityData typedEntityData, + Collection thermalStorages, + Collection thermalBuses) { + + // get the raw data + Map fieldsToAttributes = typedEntityData.getFieldsToValues(); + + // get the thermal storage input for this chp unit + Optional thermalStorage = + Optional.ofNullable(fieldsToAttributes.get(THERMAL_STORAGE)) + .flatMap( + thermalStorageUuid -> findFirstEntityByUuid(thermalStorageUuid, thermalStorages)); + + // get the thermal bus input for this chp unit + Optional thermalBus = + Optional.ofNullable(fieldsToAttributes.get("thermalBus")) + .flatMap(thermalBusUuid -> findFirstEntityByUuid(thermalBusUuid, thermalBuses)); + + // if the thermal storage or the thermal bus are not present we return an + // empty element and log a warning + if (!thermalStorage.isPresent() || !thermalBus.isPresent()) { + StringBuilder sB = new StringBuilder(); + if (!thermalStorage.isPresent()) { + sB.append("thermalStorage: ") + .append(saveMapGet(fieldsToAttributes, THERMAL_STORAGE, FIELDS_TO_VALUES_MAP)); + } + if (!thermalBus.isPresent()) { + sB.append("\nthermalBus: ") + .append(saveMapGet(fieldsToAttributes, THERMAL_BUS, FIELDS_TO_VALUES_MAP)); + } + + logSkippingWarning( + typedEntityData.getEntityClass().getSimpleName(), + saveMapGet(fieldsToAttributes, "uuid", FIELDS_TO_VALUES_MAP), + saveMapGet(fieldsToAttributes, "id", FIELDS_TO_VALUES_MAP), + sB.toString()); + + return Optional.empty(); + } + + // remove fields that are passed as objects to constructor + fieldsToAttributes + .keySet() + .removeAll(new HashSet<>(Arrays.asList("thermalBus", "thermalStorage"))); + + return Optional.of( + new ChpInputEntityData( + fieldsToAttributes, + typedEntityData.getOperatorInput(), + typedEntityData.getNode(), + typedEntityData.getTypeInput(), + thermalBus.get(), + thermalStorage.get())); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvThermalSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvThermalSource.java new file mode 100644 index 000000000..95378fe7d --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvThermalSource.java @@ -0,0 +1,211 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.csv; + +import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.io.factory.input.*; +import edu.ie3.datamodel.io.source.ThermalSource; +import edu.ie3.datamodel.io.source.TypeSource; +import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.models.input.thermal.*; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Source that provides the capability to build thermal {@link + * edu.ie3.datamodel.models.input.AssetInput} entities from .csv files + * + *

This source is not buffered, which means each call on a getter method always tries to read + * all data that is necessary to return the requested objects in a hierarchical cascading way. + * + *

If performance is an issue, it is recommended to read the data in a cascading manner, starting + * with reading the operators and thermal buses and then using the getters with arguments to avoid + * reading the same data multiple times (see the usage sketch below). + * + *

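+ * A hedged sketch of that cascading pattern (the {@code typeSource} and {@code thermalSource}
+ * variables are illustrative):
+ *
+ * <pre>{@code
+ * Set<OperatorInput> operators = typeSource.getOperators();
+ * Set<ThermalBusInput> thermalBuses = thermalSource.getThermalBuses(operators);
+ * // operators and buses are read once and reused for the dependent getters
+ * Set<ThermalStorageInput> thermalStorages = thermalSource.getThermalStorages(operators, thermalBuses);
+ * Set<ThermalHouseInput> thermalHouses = thermalSource.getThermalHouses(operators, thermalBuses);
+ * }</pre>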
The resulting sets are always unique on object and UUID base (with distinct UUIDs). + * + * @version 0.1 + * @since 03.04.20 + */ +public class CsvThermalSource extends CsvDataSource implements ThermalSource { + + // general fields + private final TypeSource typeSource; + + // factories + private final ThermalBusInputFactory thermalBusInputFactory; + private final CylindricalStorageInputFactory cylindricalStorageInputFactory; + private final ThermalHouseInputFactory thermalHouseInputFactory; + + public CsvThermalSource( + String csvSep, + String thermalUnitsFolderPath, + FileNamingStrategy fileNamingStrategy, + TypeSource typeSource) { + super(csvSep, thermalUnitsFolderPath, fileNamingStrategy); + this.typeSource = typeSource; + + // init factories + this.thermalBusInputFactory = new ThermalBusInputFactory(); + this.cylindricalStorageInputFactory = new CylindricalStorageInputFactory(); + this.thermalHouseInputFactory = new ThermalHouseInputFactory(); + } + /** {@inheritDoc} */ + @Override + public Set getThermalBuses() { + return filterEmptyOptionals( + assetInputEntityDataStream(ThermalBusInput.class, typeSource.getOperators()) + .map(thermalBusInputFactory::getEntity)) + .collect(Collectors.toSet()); + } + + /** + * {@inheritDoc} + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getThermalBuses(Set operators) { + return filterEmptyOptionals( + assetInputEntityDataStream(ThermalBusInput.class, operators) + .map(thermalBusInputFactory::getEntity)) + .collect(Collectors.toSet()); + } + /** {@inheritDoc} */ + @Override + public Set getThermalStorages() { + return new HashSet<>(getCylindricStorages()); + } + + /** + * {@inheritDoc} + * + *

If the set of {@link ThermalBusInput} entities is not exhaustive for all available {@link + * ThermalStorageInput} entities (e.g. a {@link ThermalBusInput} entity is missing) or if an error + * occurs during the building process, the affected entity is skipped (which can be seen as a + * filtering functionality), but all entities that can be built are returned anyway and the + * elements that could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getThermalStorages( + Set operators, Set thermalBuses) { + return new HashSet<>(getCylindricStorages(operators, thermalBuses)); + } + /** {@inheritDoc} */ + @Override + public Set getThermalHouses() { + + return (assetInputEntityDataStream(ThermalHouseInput.class, typeSource.getOperators()) + .map( + assetInputEntityData -> + buildThermalUnitInputEntityData(assetInputEntityData, getThermalBuses()) + .map(dataOpt -> dataOpt.flatMap(thermalHouseInputFactory::getEntity))) + .flatMap(this::filterEmptyOptionals) + .collect(Collectors.toSet())); + } + + /** + * {@inheritDoc} + * + *

If the set of {@link ThermalBusInput} entities is not exhaustive for all available {@link + * ThermalHouseInput} entities (e.g. a {@link ThermalBusInput} entity is missing) or if an error + * occurs during the building process, the affected entity is skipped (which can be seen as a + * filtering functionality), but all entities that can be built are returned anyway and the + * elements that could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getThermalHouses( + Set operators, Set thermalBuses) { + + return (assetInputEntityDataStream(ThermalHouseInput.class, operators) + .map( + assetInputEntityData -> + buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses) + .map(dataOpt -> dataOpt.flatMap(thermalHouseInputFactory::getEntity))) + .flatMap(this::filterEmptyOptionals) + .collect(Collectors.toSet())); + } + /** {@inheritDoc} */ + @Override + public Set getCylindricStorages() { + + return (assetInputEntityDataStream(CylindricalStorageInput.class, typeSource.getOperators()) + .map( + assetInputEntityData -> + buildThermalUnitInputEntityData(assetInputEntityData, getThermalBuses()) + .map(dataOpt -> dataOpt.flatMap(cylindricalStorageInputFactory::getEntity))) + .flatMap(this::filterEmptyOptionals) + .collect(Collectors.toSet())); + } + + /** + * {@inheritDoc} + * + *

If the set of {@link ThermalBusInput} entities is not exhaustive for all available {@link + * CylindricalStorageInput} entities (e.g. a {@link ThermalBusInput} entity is missing) or if an + * error occurs during the building process, the affected entity is skipped (which can be seen as + * a filtering functionality), but all entities that can be built are returned anyway and the + * elements that could not be built are logged. + * + *

If the set with {@link OperatorInput} is not exhaustive, the corresponding operator is set + * to {@link OperatorInput#NO_OPERATOR_ASSIGNED} + */ + @Override + public Set getCylindricStorages( + Set operators, Set thermalBuses) { + + return (assetInputEntityDataStream(CylindricalStorageInput.class, operators) + .map( + assetInputEntityData -> + buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses) + .map(dataOpt -> dataOpt.flatMap(cylindricalStorageInputFactory::getEntity))) + .flatMap(this::filterEmptyOptionals) + .collect(Collectors.toSet())); + } + + private Stream> buildThermalUnitInputEntityData( + AssetInputEntityData assetInputEntityData, Collection thermalBuses) { + + // get the raw data + Map fieldsToAttributes = assetInputEntityData.getFieldsToValues(); + + // get the thermal bus input for this chp unit + String thermalBusUuid = fieldsToAttributes.get("thermalbus"); + Optional thermalBus = + thermalBuses.stream() + .filter(storage -> storage.getUuid().toString().equalsIgnoreCase(thermalBusUuid)) + .findFirst(); + + // remove fields that are passed as objects to constructor + fieldsToAttributes.keySet().removeAll(new HashSet<>(Collections.singletonList("thermalbus"))); + + // if the type is not present we return an empty element and + // log a warning + if (!thermalBus.isPresent()) { + logSkippingWarning( + assetInputEntityData.getEntityClass().getSimpleName(), + fieldsToAttributes.get("uuid"), + fieldsToAttributes.get("id"), + "thermalBus: " + thermalBusUuid); + return Stream.of(Optional.empty()); + } + + return Stream.of( + Optional.of( + new ThermalUnitInputEntityData( + assetInputEntityData.getFieldsToValues(), + assetInputEntityData.getEntityClass(), + assetInputEntityData.getOperatorInput(), + thermalBus.get()))); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTypeSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTypeSource.java new file mode 100644 index 000000000..afa2600a9 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTypeSource.java @@ -0,0 +1,133 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.csv; + +import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.io.factory.EntityFactory; +import edu.ie3.datamodel.io.factory.SimpleEntityData; +import edu.ie3.datamodel.io.factory.input.OperatorInputFactory; +import edu.ie3.datamodel.io.factory.typeinput.LineTypeInputFactory; +import edu.ie3.datamodel.io.factory.typeinput.SystemParticipantTypeInputFactory; +import edu.ie3.datamodel.io.factory.typeinput.Transformer2WTypeInputFactory; +import edu.ie3.datamodel.io.factory.typeinput.Transformer3WTypeInputFactory; +import edu.ie3.datamodel.io.source.TypeSource; +import edu.ie3.datamodel.models.input.InputEntity; +import edu.ie3.datamodel.models.input.OperatorInput; +import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; +import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput; +import edu.ie3.datamodel.models.input.connector.type.Transformer3WTypeInput; +import edu.ie3.datamodel.models.input.system.type.*; +import java.util.*; +import java.util.stream.Collectors; + +/** + * Source that provides the capability to build entities of type {@link SystemParticipantTypeInput} + * and {@link OperatorInput} from .csv files + * + * @version 0.1 + * @since 05.04.20 + */ +public class CsvTypeSource extends CsvDataSource implements TypeSource { + + // factories + private final OperatorInputFactory operatorInputFactory; + private final Transformer2WTypeInputFactory transformer2WTypeInputFactory; + private final LineTypeInputFactory lineTypeInputFactory; + private final Transformer3WTypeInputFactory transformer3WTypeInputFactory; + private final SystemParticipantTypeInputFactory systemParticipantTypeInputFactory; + + public CsvTypeSource( + String csvSep, String typeFolderPath, FileNamingStrategy fileNamingStrategy) { + super(csvSep, typeFolderPath, fileNamingStrategy); + + // init factories + operatorInputFactory = new OperatorInputFactory(); + transformer2WTypeInputFactory = new Transformer2WTypeInputFactory(); + lineTypeInputFactory = new LineTypeInputFactory(); + transformer3WTypeInputFactory = new Transformer3WTypeInputFactory(); + systemParticipantTypeInputFactory = new SystemParticipantTypeInputFactory(); + } + /** {@inheritDoc} */ + @Override + public Set getTransformer2WTypes() { + return buildSimpleEntities(Transformer2WTypeInput.class, transformer2WTypeInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getOperators() { + return buildSimpleEntities(OperatorInput.class, operatorInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getLineTypes() { + return buildSimpleEntities(LineTypeInput.class, lineTypeInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getTransformer3WTypes() { + return buildSimpleEntities(Transformer3WTypeInput.class, transformer3WTypeInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getBmTypes() { + return buildSimpleEntities(BmTypeInput.class, systemParticipantTypeInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getChpTypes() { + return buildSimpleEntities(ChpTypeInput.class, systemParticipantTypeInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getHpTypes() { + return buildSimpleEntities(HpTypeInput.class, systemParticipantTypeInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getStorageTypes() { + return 
buildSimpleEntities(StorageTypeInput.class, systemParticipantTypeInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getWecTypes() { + return buildSimpleEntities(WecTypeInput.class, systemParticipantTypeInputFactory); + } + /** {@inheritDoc} */ + @Override + public Set getEvTypes() { + return buildSimpleEntities(EvTypeInput.class, systemParticipantTypeInputFactory); + } + + /** + * Tries to build a set of {@link InputEntity}s of the provided entity class based on the provided + * factory. To do so, first entity data of type {@link SimpleEntityData} is constructed based on + * the input .csv file that can be derived from the entity class. This data is then passed to the + * factory and used to build the corresponding entities. + * + *

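+ * A hedged illustration of pairing entity class and factory correctly (see also the warning in
+ * the next paragraph); the correct call mirrors {@code getLineTypes()} above, the mismatch is a
+ * hypothetical example:
+ *
+ * <pre>{@code
+ * // correct: the factory is able to build LineTypeInput entities
+ * Set<LineTypeInput> lineTypes = buildSimpleEntities(LineTypeInput.class, lineTypeInputFactory);
+ * // incorrect: passing e.g. transformer2WTypeInputFactory for LineTypeInput.class
+ * // would end in a casting exception at runtime
+ * }</pre>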
Be careful, that always a factory that is able to produce an entity of type is passed + * into as argument. Otherwise, a casting exception will be thrown. + * + * @param entityClass the concrete class of the {@link InputEntity} that should be built + * @param factory the entity factory that should be used + * @param the type of the resulting entity + * @return a set containing all entities that could have been built or an empty set if no entity + * could been built + */ + @SuppressWarnings("unchecked cast") + private Set buildSimpleEntities( + Class entityClass, EntityFactory factory) { + return (Set) + buildStreamWithFieldsToAttributesMap(entityClass, connector) + .map( + fieldsToAttributes -> { + SimpleEntityData data = new SimpleEntityData(fieldsToAttributes, entityClass); + return factory.getEntity(data); + }) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toSet()); + } +} diff --git a/src/main/java/edu/ie3/datamodel/models/input/NodeInput.java b/src/main/java/edu/ie3/datamodel/models/input/NodeInput.java index 6bfd23c22..4cc6f10b9 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/NodeInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/NodeInput.java @@ -30,12 +30,13 @@ public class NodeInput extends AssetInput { /** Use this default value if geoPosition is unknown */ public static final Point DEFAULT_GEO_POSITION = - GeoUtils.DEFAULT_GEOMETRY_FACTORY.createPoint(new Coordinate(51.4843281, 7.4116482)); + GeoUtils.DEFAULT_GEOMETRY_FACTORY.createPoint(new Coordinate(7.4116482, 51.4843281)); /** Voltage level of this node */ private final VoltageLevel voltLvl; /** Subnet of this node */ private final int subnet; + /** * Constructor for an operated node * @@ -137,7 +138,16 @@ public int hashCode() { @Override public String toString() { return "NodeInput{" - + "vTarget=" + + "uuid=" + + getUuid() + + ", id='" + + getId() + + '\'' + + ", operator=" + + getOperator() + + ", operationTime=" + + getOperationTime() + + ", vTarget=" + vTarget + ", slack=" + slack diff --git a/src/main/java/edu/ie3/datamodel/models/input/OperatorInput.java b/src/main/java/edu/ie3/datamodel/models/input/OperatorInput.java index be4e33671..eec40eed3 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/OperatorInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/OperatorInput.java @@ -43,6 +43,6 @@ public int hashCode() { @Override public String toString() { - return "OperatorInput{" + "id='" + id + '\'' + '}'; + return "OperatorInput{" + "uuid=" + getUuid() + ", id='" + id + '\'' + '}'; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/connector/ConnectorInput.java b/src/main/java/edu/ie3/datamodel/models/input/connector/ConnectorInput.java index 558a22d74..ca5cd8b2a 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/connector/ConnectorInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/connector/ConnectorInput.java @@ -19,7 +19,7 @@ public abstract class ConnectorInput extends AssetInput implements HasNodes { /** Grid node at the other side of the connector */ private final NodeInput nodeB; /** Amount of parallelDevices */ - private final int noOfParallelDevices; + private final int parallelDevices; /** * Constructor for an operated connector @@ -30,7 +30,7 @@ public abstract class ConnectorInput extends AssetInput implements HasNodes { * @param operationTime Time for which the entity is operated * @param nodeA Grid node at one side of the connector * @param nodeB Grid node at the other side of the connector - * @param 
noOfParallelDevices Amount of parallel devices + * @param parallelDevices Amount of parallel devices */ public ConnectorInput( UUID uuid, @@ -39,11 +39,11 @@ public ConnectorInput( OperationTime operationTime, NodeInput nodeA, NodeInput nodeB, - int noOfParallelDevices) { + int parallelDevices) { super(uuid, id, operator, operationTime); this.nodeA = nodeA; this.nodeB = nodeB; - this.noOfParallelDevices = noOfParallelDevices; + this.parallelDevices = parallelDevices; } /** @@ -53,14 +53,14 @@ public ConnectorInput( * @param id of the asset * @param nodeA Grid node at one side of the connector * @param nodeB Grid node at the other side of the connector - * @param noOfParallelDevices Amount of parallel devices + * @param parallelDevices Amount of parallel devices */ public ConnectorInput( - UUID uuid, String id, NodeInput nodeA, NodeInput nodeB, int noOfParallelDevices) { + UUID uuid, String id, NodeInput nodeA, NodeInput nodeB, int parallelDevices) { super(uuid, id); this.nodeA = nodeA; this.nodeB = nodeB; - this.noOfParallelDevices = noOfParallelDevices; + this.parallelDevices = parallelDevices; } public NodeInput getNodeA() { @@ -76,8 +76,8 @@ public List allNodes() { return Collections.unmodifiableList(Arrays.asList(getNodeA(), getNodeB())); } - public int getNoOfParallelDevices() { - return noOfParallelDevices; + public int getParallelDevices() { + return parallelDevices; } @Override @@ -86,14 +86,14 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (!super.equals(o)) return false; ConnectorInput that = (ConnectorInput) o; - return noOfParallelDevices == that.noOfParallelDevices + return parallelDevices == that.parallelDevices && nodeA.equals(that.nodeA) && nodeB.equals(that.nodeB); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), nodeA, nodeB, noOfParallelDevices); + return Objects.hash(super.hashCode(), nodeA, nodeB, parallelDevices); } @Override @@ -104,7 +104,7 @@ public String toString() { + ", nodeB=" + nodeB + ", noOfParallelDevices=" - + noOfParallelDevices + + parallelDevices + '}'; } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/GraphicElements.java b/src/main/java/edu/ie3/datamodel/models/input/container/GraphicElements.java index 3d7ee3522..c734ecfe5 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/GraphicElements.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/GraphicElements.java @@ -5,9 +5,11 @@ */ package edu.ie3.datamodel.models.input.container; +import edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.graphics.LineGraphicInput; import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput; +import edu.ie3.datamodel.utils.ValidationUtils; import java.util.*; import java.util.stream.Collectors; @@ -36,6 +38,18 @@ public GraphicElements(Collection graphicElements) { graphicElements.stream() .flatMap(graphics -> graphics.lineGraphics.stream()) .collect(Collectors.toSet()); + + // sanity check for distinct uuids + Optional exceptionString = + ValidationUtils.checkForDuplicateUuids(new HashSet<>(this.allEntitiesAsList())); + if (exceptionString.isPresent()) { + throw new InvalidGridException( + "The provided entities in '" + + this.getClass().getSimpleName() + + "' contains duplicate UUIDs. 
" + + "This is not allowed!\nDuplicated uuids:\n\n" + + exceptionString); + } } @Override diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/GridContainer.java b/src/main/java/edu/ie3/datamodel/models/input/container/GridContainer.java index 54ff276c0..0444c2056 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/GridContainer.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/GridContainer.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.models.input.container; +import edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.utils.ValidationUtils; import java.util.*; @@ -43,6 +44,18 @@ public List allEntitiesAsList() { @Override public void validate() { + // sanity check to ensure distinct UUIDs + Optional exceptionString = + ValidationUtils.checkForDuplicateUuids(new HashSet<>(this.allEntitiesAsList())); + if (exceptionString.isPresent()) { + throw new InvalidGridException( + "The provided entities in '" + + this.getClass().getSimpleName() + + "' contains duplicate UUIDs. " + + "This is not allowed!\nDuplicated uuids:\n\n" + + exceptionString); + } + ValidationUtils.checkGrid(this); } diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/RawGridElements.java b/src/main/java/edu/ie3/datamodel/models/input/container/RawGridElements.java index 117081556..c20446e52 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/RawGridElements.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/RawGridElements.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.models.input.container; +import edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.MeasurementUnitInput; import edu.ie3.datamodel.models.input.NodeInput; @@ -44,6 +45,18 @@ public RawGridElements( this.transformer3Ws = transformer3Ws; this.switches = switches; this.measurementUnits = measurementUnits; + + // sanity check to ensure distinct UUIDs + Optional exceptionString = + ValidationUtils.checkForDuplicateUuids(new HashSet<>(this.allEntitiesAsList())); + if (exceptionString.isPresent()) { + throw new InvalidGridException( + "The provided entities in '" + + this.getClass().getSimpleName() + + "' contains duplicate UUIDs. 
" + + "This is not allowed!\nDuplicated uuids:\n\n" + + exceptionString); + } } /** diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/SubGridContainer.java b/src/main/java/edu/ie3/datamodel/models/input/container/SubGridContainer.java index b5bb15acf..338303dda 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/SubGridContainer.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/SubGridContainer.java @@ -5,12 +5,13 @@ */ package edu.ie3.datamodel.models.input.container; -import edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; import edu.ie3.datamodel.utils.ContainerUtils; import java.util.Objects; -/** Represents the accumulation of all data needed to create a complete single grid */ +/** + * Represents the accumulation of all data needed to create one galvanically complete single grid + */ public class SubGridContainer extends GridContainer { /** subnet number of this grid */ private final int subnet; @@ -25,17 +26,7 @@ public SubGridContainer( GraphicElements graphics) { super(gridName, rawGrid, systemParticipants, graphics); this.subnet = subnet; - - try { - this.predominantVoltageLevel = ContainerUtils.determinePredominantVoltLvl(rawGrid, subnet); - } catch (InvalidGridException e) { - throw new InvalidGridException( - "Cannot build sub grid model for (" - + gridName - + ", " - + subnet - + "), as the predominant voltage level cannot be determined."); - } + this.predominantVoltageLevel = ContainerUtils.determinePredominantVoltLvl(rawGrid, subnet); } public int getSubnet() { diff --git a/src/main/java/edu/ie3/datamodel/models/input/container/SystemParticipants.java b/src/main/java/edu/ie3/datamodel/models/input/container/SystemParticipants.java index 16ec6216b..bb6293f32 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/container/SystemParticipants.java +++ b/src/main/java/edu/ie3/datamodel/models/input/container/SystemParticipants.java @@ -5,9 +5,11 @@ */ package edu.ie3.datamodel.models.input.container; +import edu.ie3.datamodel.exceptions.InvalidGridException; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.EvcsInput; import edu.ie3.datamodel.models.input.system.*; +import edu.ie3.datamodel.utils.ValidationUtils; import java.util.*; import java.util.stream.Collectors; @@ -19,6 +21,7 @@ public class SystemParticipants implements InputContainer { private final Set bmPlants; private final Set chpPlants; private final Set evCS; + private final Set evs; private final Set fixedFeedIns; private final Set heatPumps; private final Set loads; @@ -30,6 +33,7 @@ public SystemParticipants( Set bmPlants, Set chpPlants, Set evCS, + Set evs, Set fixedFeedIns, Set heatPumps, Set loads, @@ -39,12 +43,25 @@ public SystemParticipants( this.bmPlants = bmPlants; this.chpPlants = chpPlants; this.evCS = evCS; + this.evs = evs; this.fixedFeedIns = fixedFeedIns; this.heatPumps = heatPumps; this.loads = loads; this.pvPlants = pvPlants; this.storages = storages; this.wecPlants = wecPlants; + + // sanity check for distinct uuids + Optional exceptionString = + ValidationUtils.checkForDuplicateUuids(new HashSet<>(this.allEntitiesAsList())); + if (exceptionString.isPresent()) { + throw new InvalidGridException( + "The provided entities in '" + + this.getClass().getSimpleName() + + "' contains duplicate UUIDs. 
" + + "This is not allowed!\nDuplicated uuids:\n\n" + + exceptionString); + } } /** @@ -65,6 +82,10 @@ public SystemParticipants(Collection systemParticipants) { systemParticipants.stream() .flatMap(participants -> participants.evCS.stream()) .collect(Collectors.toSet()); + this.evs = + systemParticipants.stream() + .flatMap(participants -> participants.evs.stream()) + .collect(Collectors.toSet()); this.fixedFeedIns = systemParticipants.stream() .flatMap(participants -> participants.fixedFeedIns.stream()) @@ -97,6 +118,7 @@ public List allEntitiesAsList() { allEntities.addAll(bmPlants); allEntities.addAll(chpPlants); allEntities.addAll(evCS); + allEntities.addAll(evs); allEntities.addAll(fixedFeedIns); allEntities.addAll(heatPumps); allEntities.addAll(loads); @@ -112,74 +134,51 @@ public void validate() { "Currently there are no tests for system participants in ValidationUtils."); } - public void add(BmInput bm) { - bmPlants.add(bm); - } - - public void add(ChpInput chp) { - chpPlants.add(chp); - } - - public void add(EvcsInput evcsInput) { - evCS.add(evcsInput); - } - - public void add(FixedFeedInInput fixedFeedIn) { - fixedFeedIns.add(fixedFeedIn); - } - - public void add(HpInput hp) { - heatPumps.add(hp); - } - - public void add(LoadInput load) { - loads.add(load); - } - - public void add(PvInput pv) { - pvPlants.add(pv); - } - - public void add(StorageInput storage) { - this.storages.add(storage); - } - - public void add(WecInput wec) { - wecPlants.add(wec); - } - /** @return unmodifiable Set of all biomass plants in this grid */ public Set getBmPlants() { return Collections.unmodifiableSet(bmPlants); } + /** @return unmodifiable Set of all CHP plants in this grid */ public Set getChpPlants() { return Collections.unmodifiableSet(chpPlants); } + /** @return unmodifiable Set of all ev charging stations in this grid */ public Set getEvCS() { return Collections.unmodifiableSet(evCS); } + + /** @return unmodifiable Set of all electric vehicles in this grid */ + public Set getEvs() { + return evs; + } + /** @return unmodifiable Set of all fixed feed in in this grid */ public Set getFixedFeedIns() { return Collections.unmodifiableSet(fixedFeedIns); } + /** @return unmodifiable Set of all heat pumps in this grid */ public Set getHeatPumps() { return Collections.unmodifiableSet(heatPumps); } + /** @return unmodifiable Set of all loads in this grid */ public Set getLoads() { return Collections.unmodifiableSet(loads); } + /** @return unmodifiable Set of all PV plants in this grid */ public Set getPvPlants() { return Collections.unmodifiableSet(pvPlants); } + /** @return unmodifiable Set of all storages in this grid */ public Set getStorages() { return Collections.unmodifiableSet(storages); } + /** @return unmodifiable Set of all WECs in this grid */ public Set getWecPlants() { return Collections.unmodifiableSet(wecPlants); @@ -190,20 +189,30 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SystemParticipants that = (SystemParticipants) o; - return bmPlants.equals(that.bmPlants) - && chpPlants.equals(that.chpPlants) - && evCS.equals(that.evCS) - && fixedFeedIns.equals(that.fixedFeedIns) - && heatPumps.equals(that.heatPumps) - && loads.equals(that.loads) - && pvPlants.equals(that.pvPlants) - && storages.equals(that.storages) - && wecPlants.equals(that.wecPlants); + return Objects.equals(bmPlants, that.bmPlants) + && Objects.equals(chpPlants, that.chpPlants) + && Objects.equals(evCS, that.evCS) + && Objects.equals(evs, 
that.evs) + && Objects.equals(fixedFeedIns, that.fixedFeedIns) + && Objects.equals(heatPumps, that.heatPumps) + && Objects.equals(loads, that.loads) + && Objects.equals(pvPlants, that.pvPlants) + && Objects.equals(storages, that.storages) + && Objects.equals(wecPlants, that.wecPlants); } @Override public int hashCode() { return Objects.hash( - bmPlants, chpPlants, evCS, fixedFeedIns, heatPumps, loads, pvPlants, storages, wecPlants); + bmPlants, + chpPlants, + evCS, + evs, + fixedFeedIns, + heatPumps, + loads, + pvPlants, + storages, + wecPlants); } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/ChpInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/ChpInput.java index a1809917a..bed90e9b4 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/ChpInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/ChpInput.java @@ -5,6 +5,8 @@ */ package edu.ie3.datamodel.models.input.system; +import edu.ie3.datamodel.io.extractor.HasThermalBus; +import edu.ie3.datamodel.io.extractor.HasThermalStorage; import edu.ie3.datamodel.io.extractor.HasType; import edu.ie3.datamodel.models.OperationTime; import edu.ie3.datamodel.models.input.NodeInput; @@ -17,7 +19,8 @@ import java.util.UUID; /** Describes a combined heat and power plant */ -public class ChpInput extends SystemParticipantInput implements HasType { +public class ChpInput extends SystemParticipantInput + implements HasType, HasThermalBus, HasThermalStorage { /** The thermal bus, this model is connected to */ private final ThermalBusInput thermalBus; /** Type of this CHP plant, containing default values for CHP plants of this kind */ @@ -89,6 +92,7 @@ public ChpInput( this.marketReaction = marketReaction; } + @Override public ThermalBusInput getThermalBus() { return thermalBus; } @@ -98,6 +102,7 @@ public ChpTypeInput getType() { return type; } + @Override public ThermalStorageInput getThermalStorage() { return thermalStorage; } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/HpInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/HpInput.java index 0e59e02c1..d4164fa10 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/HpInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/HpInput.java @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.models.input.system; +import edu.ie3.datamodel.io.extractor.HasThermalBus; import edu.ie3.datamodel.io.extractor.HasType; import edu.ie3.datamodel.models.OperationTime; import edu.ie3.datamodel.models.input.NodeInput; @@ -16,7 +17,7 @@ import java.util.UUID; /** Describes a heat pump */ -public class HpInput extends SystemParticipantInput implements HasType { +public class HpInput extends SystemParticipantInput implements HasType, HasThermalBus { /** Type of this heat pump, containing default values for heat pump of this kind */ private final HpTypeInput type; /** The thermal bus, this model is connected to */ @@ -75,6 +76,7 @@ public HpTypeInput getType() { return type; } + @Override public ThermalBusInput getThermalBus() { return thermalBus; } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CharacteristicInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CharacteristicInput.java index 110317df9..952aa2382 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CharacteristicInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CharacteristicInput.java @@ -129,14 +129,15 @@ public String 
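ChpInput and HpInput now take part in the extractor mechanism for thermal elements. The bodies of HasThermalBus and HasThermalStorage are not part of this diff; judging from the overridden getters they presumably look roughly like the following sketch (the NestedEntity parent is an assumption, based on the extractor sub-interfaces referenced in ExtractorTest).

// Sketch, inferred from the @Override annotations above - not the actual source.
public interface HasThermalBus extends NestedEntity { // parent interface assumed
  ThermalBusInput getThermalBus();
}

public interface HasThermalStorage extends NestedEntity { // parent interface assumed
  ThermalStorageInput getThermalStorage();
}
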
deSerialize() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; CharacteristicInput that = (CharacteristicInput) o; - return points.equals(that.points); + return decimalPlaces == that.decimalPlaces + && characteristicPrefix.equals(that.characteristicPrefix) + && points.equals(that.points); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), points); + return Objects.hash(characteristicPrefix, decimalPlaces, points); } @Override diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CosPhiFixed.java b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CosPhiFixed.java index 3ae7ee020..fc61794a4 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CosPhiFixed.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CosPhiFixed.java @@ -40,6 +40,16 @@ private static CosPhiFixed buildConstantCharacteristic() { return new CosPhiFixed(unmodifiableSortedSet(points)); } + @Override + public boolean equals(Object o) { + return super.equals(o); + } + + @Override + public int hashCode() { + return super.hashCode(); + } + @Override public String toString() { return "cosPhiFixed{" + "points=" + points + '}'; diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CosPhiP.java b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CosPhiP.java index 339723120..8dac1fe96 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CosPhiP.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/CosPhiP.java @@ -27,6 +27,16 @@ public CosPhiP(String input) throws ParsingException { super(input, StandardUnits.Q_CHARACTERISTIC, StandardUnits.Q_CHARACTERISTIC, PREFIX, 2); } + @Override + public boolean equals(Object o) { + return super.equals(o); + } + + @Override + public int hashCode() { + return super.hashCode(); + } + @Override public String toString() { return "CosPhiP{" + "points=" + points + '}'; diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/EvCharacteristicInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/EvCharacteristicInput.java index b1dee9b41..1fd26ec4b 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/EvCharacteristicInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/EvCharacteristicInput.java @@ -29,6 +29,16 @@ public EvCharacteristicInput(String input) throws ParsingException { super(input, StandardUnits.ACTIVE_POWER_IN, StandardUnits.EV_CHARACTERISTIC, "ev", 2); } + @Override + public boolean equals(Object o) { + return super.equals(o); + } + + @Override + public int hashCode() { + return super.hashCode(); + } + @Override public String toString() { return "EvCharacteristicInput{" + "points=" + points + '}'; diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/OlmCharacteristicInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/OlmCharacteristicInput.java index fc3946306..c0d2389d8 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/OlmCharacteristicInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/OlmCharacteristicInput.java @@ -39,6 +39,16 @@ private static OlmCharacteristicInput buildConstantCharacteristic() { return new 
OlmCharacteristicInput(unmodifiableSortedSet(points)); } + @Override + public boolean equals(Object o) { + return super.equals(o); + } + + @Override + public int hashCode() { + return super.hashCode(); + } + @Override public String toString() { return "OlmCharacteristicInput{" + "points=" + points + '}'; diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/QV.java b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/QV.java index 4178d0959..e6ee54d64 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/QV.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/QV.java @@ -26,6 +26,16 @@ public QV(String input) throws ParsingException { super(input, StandardUnits.VOLTAGE_MAGNITUDE, StandardUnits.Q_CHARACTERISTIC, PREFIX, 2); } + @Override + public boolean equals(Object o) { + return super.equals(o); + } + + @Override + public int hashCode() { + return super.hashCode(); + } + @Override public String toString() { return "QV{" + "points=" + points + '}'; diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/ReactivePowerCharacteristic.java b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/ReactivePowerCharacteristic.java index de81e99ac..bde993ad4 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/ReactivePowerCharacteristic.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/ReactivePowerCharacteristic.java @@ -47,4 +47,14 @@ public static ReactivePowerCharacteristic parse(String input) throws ParsingExce + input + "' to a reactive power characteristic, as it does not meet the specifications of any of the available classes."); } + + @Override + public boolean equals(Object o) { + return super.equals(o); + } + + @Override + public int hashCode() { + return super.hashCode(); + } } diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/WecCharacteristicInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/WecCharacteristicInput.java index af1ada0b2..699d61c89 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/WecCharacteristicInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/characteristic/WecCharacteristicInput.java @@ -22,6 +22,16 @@ public WecCharacteristicInput(String input) throws ParsingException { super(input, StandardUnits.WIND_VELOCITY, StandardUnits.CP_CHARACTERISTIC, "cP", 2); } + @Override + public boolean equals(Object o) { + return super.equals(o); + } + + @Override + public int hashCode() { + return super.hashCode(); + } + @Override public String toString() { return "WecCharacteristicInput{" + "points=" + points + '}'; diff --git a/src/main/java/edu/ie3/datamodel/models/input/system/type/WecTypeInput.java b/src/main/java/edu/ie3/datamodel/models/input/system/type/WecTypeInput.java index efb7de0cb..249dc08b6 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/system/type/WecTypeInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/system/type/WecTypeInput.java @@ -33,7 +33,7 @@ public class WecTypeInput extends SystemParticipantTypeInput { * @param id of this type of WEC * @param capex Captial expense for this type of WEC (typically in €) * @param opex Operating expense for this type of WEC (typically in €) - * @param cosphi Power factor for this type of WEC + * @param cosphiRated Power factor for this type of WEC * @param cpCharacteristic Betz curve of this type * @param etaConv 
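The characteristic subclasses add no state of their own, so their new equals/hashCode overrides only delegate to CharacteristicInput, which now compares prefix, decimal places and points; behaviourally this is equivalent to inheriting them, presumably added to make the contract explicit. A hedged example of the resulting behaviour, assuming "cosPhiP" is the serialized prefix of CosPhiP (not shown in this hunk) and that the fragment runs where ParsingException is handled:

// Two characteristics parsed from the same string compare equal,
// because prefix, decimal places and points all match.
CosPhiP first = new CosPhiP("cosPhiP:{(0.0,1.0),(0.9,0.8)}");
CosPhiP second = new CosPhiP("cosPhiP:{(0.0,1.0),(0.9,0.8)}");
assert first.equals(second) && first.hashCode() == second.hashCode();
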
Efficiency of converter for this type of WEC (typically in %) * @param sRated Rated apparent power for this type of WEC (typically in kVA) @@ -45,13 +45,13 @@ public WecTypeInput( String id, ComparableQuantity capex, ComparableQuantity opex, - double cosphi, + double cosphiRated, WecCharacteristicInput cpCharacteristic, ComparableQuantity etaConv, ComparableQuantity sRated, ComparableQuantity rotorArea, ComparableQuantity hubHeight) { - super(uuid, id, capex, opex, sRated.to(StandardUnits.S_RATED), cosphi); + super(uuid, id, capex, opex, sRated.to(StandardUnits.S_RATED), cosphiRated); this.cpCharacteristic = cpCharacteristic; this.etaConv = etaConv.to(StandardUnits.EFFICIENCY); this.rotorArea = rotorArea.to(StandardUnits.ROTOR_AREA); diff --git a/src/main/java/edu/ie3/datamodel/models/input/thermal/ThermalUnitInput.java b/src/main/java/edu/ie3/datamodel/models/input/thermal/ThermalUnitInput.java index 1946b1917..ad33c91f1 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/thermal/ThermalUnitInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/thermal/ThermalUnitInput.java @@ -5,7 +5,7 @@ */ package edu.ie3.datamodel.models.input.thermal; -import edu.ie3.datamodel.io.extractor.HasBus; +import edu.ie3.datamodel.io.extractor.HasThermalBus; import edu.ie3.datamodel.models.OperationTime; import edu.ie3.datamodel.models.input.AssetInput; import edu.ie3.datamodel.models.input.OperatorInput; @@ -13,18 +13,18 @@ import java.util.UUID; /** Abstract class for grouping all common properties to thermal models. */ -public abstract class ThermalUnitInput extends AssetInput implements HasBus { +public abstract class ThermalUnitInput extends AssetInput implements HasThermalBus { /** The thermal bus, a thermal unit is connected to. */ - private final ThermalBusInput bus; + private final ThermalBusInput thermalBus; /** * @param uuid Unique identifier of a certain thermal input * @param id Identifier of the thermal unit - * @param bus hermal bus, a thermal unit is connected to + * @param thermalBus hermal bus, a thermal unit is connected to */ - ThermalUnitInput(UUID uuid, String id, ThermalBusInput bus) { + ThermalUnitInput(UUID uuid, String id, ThermalBusInput thermalBus) { super(uuid, id); - this.bus = bus; + this.thermalBus = thermalBus; } /** @@ -32,21 +32,21 @@ public abstract class ThermalUnitInput extends AssetInput implements HasBus { * @param id Identifier of the thermal unit * @param operator operator of the asset * @param operationTime operation time of the asset - * @param bus thermal bus, a thermal unit is connected to + * @param thermalBus thermal bus, a thermal unit is connected to */ ThermalUnitInput( UUID uuid, String id, OperatorInput operator, OperationTime operationTime, - ThermalBusInput bus) { + ThermalBusInput thermalBus) { super(uuid, id, operator, operationTime); - this.bus = bus; + this.thermalBus = thermalBus; } @Override - public ThermalBusInput getBus() { - return bus; + public ThermalBusInput getThermalBus() { + return thermalBus; } @Override @@ -55,16 +55,16 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (!super.equals(o)) return false; ThermalUnitInput that = (ThermalUnitInput) o; - return bus.equals(that.bus); + return thermalBus.equals(that.thermalBus); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), bus); + return Objects.hash(super.hashCode(), thermalBus); } @Override public String toString() { - return "ThermalUnitInput{" + "bus=" + bus + '}'; + return "ThermalUnitInput{" + 
"bus=" + thermalBus + '}'; } } diff --git a/src/main/java/edu/ie3/datamodel/utils/ContainerUtils.java b/src/main/java/edu/ie3/datamodel/utils/ContainerUtils.java index 494ad7a0d..4290634aa 100644 --- a/src/main/java/edu/ie3/datamodel/utils/ContainerUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/ContainerUtils.java @@ -98,6 +98,7 @@ public static SystemParticipants filterForSubnet(SystemParticipants input, int s Set bmPlants = filterParticipants(input.getBmPlants(), subnet); Set chpPlants = filterParticipants(input.getChpPlants(), subnet); /* Electric vehicle charging systems are currently dummy implementations without nodal reverence */ + Set evs = filterParticipants(input.getEvs(), subnet); Set fixedFeedIns = filterParticipants(input.getFixedFeedIns(), subnet); Set heatpumps = filterParticipants(input.getHeatPumps(), subnet); Set loads = filterParticipants(input.getLoads(), subnet); @@ -109,6 +110,7 @@ public static SystemParticipants filterForSubnet(SystemParticipants input, int s bmPlants, chpPlants, new HashSet<>(), + evs, fixedFeedIns, heatpumps, loads, diff --git a/src/main/java/edu/ie3/datamodel/utils/ValidationUtils.java b/src/main/java/edu/ie3/datamodel/utils/ValidationUtils.java index a127a7a7d..0c4047bf8 100644 --- a/src/main/java/edu/ie3/datamodel/utils/ValidationUtils.java +++ b/src/main/java/edu/ie3/datamodel/utils/ValidationUtils.java @@ -24,6 +24,8 @@ import edu.ie3.datamodel.models.input.system.SystemParticipantInput; import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import javax.measure.Quantity; @@ -414,15 +416,9 @@ public static void checkTransformer3WType(Transformer3WTypeInput trafoType) { new Quantity[] {trafoType.getgM(), trafoType.getbM(), trafoType.getdPhi()}, trafoType); detectZeroOrNegativeQuantities( new Quantity[] { - trafoType.getsRatedA(), - trafoType.getsRatedB(), - trafoType.getsRatedC(), - trafoType.getvRatedA(), - trafoType.getvRatedB(), - trafoType.getvRatedC(), - trafoType.getxScA(), - trafoType.getxScB(), - trafoType.getxScC(), + trafoType.getsRatedA(), trafoType.getsRatedB(), trafoType.getsRatedC(), + trafoType.getvRatedA(), trafoType.getvRatedB(), trafoType.getvRatedC(), + trafoType.getxScA(), trafoType.getxScB(), trafoType.getxScC(), trafoType.getdV() }, trafoType); @@ -518,4 +514,66 @@ private static void detectMalformedQuantities( throw new UnsafeEntityException(msg + ": " + malformedQuantities, entity); } } + + /** + * Determines if the provided set only contains elements with distinct UUIDs + * + * @param entities the set that should be checked + * @return true if all UUIDs of the provided entities are unique, false otherwise + */ + public static boolean distinctUuids(Set entities) { + return entities.stream() + .filter(distinctByKey(UniqueEntity::getUuid)) + .collect(Collectors.toSet()) + .size() + == entities.size(); + } + + /** + * Predicate that can be used to filter elements based on a given Function + * + * @param keyExtractor the function that should be used for the filter operations + * @param the type of the returning predicate + * @return the filter predicate that filters based on the provided function + */ + public static Predicate distinctByKey(Function keyExtractor) { + Set seen = ConcurrentHashMap.newKeySet(); + return t -> seen.add(keyExtractor.apply(t)); + } + + /** + * Checks if the provided set of unique entities only contains 
elements with distinct UUIDs and + * either returns a string with duplicated UUIDs or an empty optional otherwise. + * + * @param entities the entities that should be checkd for UUID uniqueness + * @return either a string wrapped in an optional with duplicate UUIDs or an empty optional + */ + public static Optional checkForDuplicateUuids(Set entities) { + if (distinctUuids(entities)) { + return Optional.empty(); + } + String duplicationsString = + entities.stream() + .collect(Collectors.groupingBy(UniqueEntity::getUuid, Collectors.counting())) + .entrySet() + .stream() + .filter(entry -> entry.getValue() > 1) + .map( + entry -> { + String duplicateEntitiesString = + entities.stream() + .filter(entity -> entity.getUuid().equals(entry.getKey())) + .map(UniqueEntity::toString) + .collect(Collectors.joining("\n - ")); + + return entry.getKey() + + ": " + + entry.getValue() + + "\n - " + + duplicateEntitiesString; + }) + .collect(Collectors.joining("\n\n")); + + return Optional.of(duplicationsString); + } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/extractor/ExtractorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/extractor/ExtractorTest.groovy index 4674219ae..04ba7c010 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/extractor/ExtractorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/extractor/ExtractorTest.groovy @@ -6,6 +6,9 @@ package edu.ie3.datamodel.io.extractor import edu.ie3.datamodel.exceptions.ExtractorException +import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput +import edu.ie3.datamodel.models.input.system.FixedFeedInInput import edu.ie3.test.common.GridTestData as gtd import edu.ie3.test.common.SystemParticipantTestData as sptd import edu.ie3.test.common.ThermalUnitInputTestData as tutd @@ -26,7 +29,7 @@ class ExtractorTest extends Specification { def "An Extractor should be able to extract an entity with nested elements correctly"() { expect: - Extractor.extractElements(nestedEntity) == expectedExtractedEntities + Extractor.extractElements(nestedEntity) as Set == expectedExtractedEntities as Set where: nestedEntity || expectedExtractedEntities @@ -41,7 +44,8 @@ class ExtractorTest extends Specification { gtd.transformerAtoBtoC.nodeB, gtd.transformerAtoBtoC.nodeC, gtd.transformerAtoBtoC.type, - gtd.transformerAtoBtoC.operator + gtd.transformerAtoBtoC.operator, + gtd.transformerAtoBtoC.nodeA.operator ] gtd.transformerCtoG || [ gtd.transformerCtoG.nodeA, @@ -52,64 +56,92 @@ class ExtractorTest extends Specification { gtd.switchAtoB || [ gtd.switchAtoB.nodeA, gtd.switchAtoB.nodeB, + gtd.switchAtoB.nodeA.operator, gtd.switchAtoB.operator ] sptd.fixedFeedInInput || [ sptd.fixedFeedInInput.node, - sptd.fixedFeedInInput.operator + sptd.fixedFeedInInput.operator, + sptd.fixedFeedInInput.node.operator ] sptd.wecInput || [ sptd.wecInput.node, sptd.wecInput.type, - sptd.wecInput.operator + sptd.wecInput.operator, + sptd.wecInput.node.operator ] sptd.chpInput || [ sptd.chpInput.node, + sptd.chpInput.node.operator, sptd.chpInput.type, - sptd.chpInput.operator + sptd.chpInput.thermalBus, + sptd.chpInput.thermalStorage, + sptd.chpInput.thermalStorage.thermalBus, + sptd.chpInput.thermalStorage.thermalBus.operator ] sptd.bmInput || [ sptd.bmInput.node, sptd.bmInput.type, - sptd.bmInput.operator + sptd.bmInput.operator, + sptd.bmInput.node.operator ] sptd.evInput || [ sptd.evInput.node, sptd.evInput.type, - sptd.evInput.operator + sptd.evInput.operator, + sptd.evInput.node.operator ] sptd.storageInput || [ 
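Both new ValidationUtils helpers are also usable on their own. A small sketch with a hypothetical node list; the generic signature of distinctByKey is assumed to follow the usual <T> Predicate<T> pattern, and checkForDuplicateUuids is assumed to accept any set of UniqueEntity instances.

// Deduplicate a stream by UUID using the stateful predicate...
List<NodeInput> uniqueNodes =
    nodes.stream()
        .filter(ValidationUtils.distinctByKey(NodeInput::getUuid))
        .collect(Collectors.toList());

// ...or only report duplicates without modifying anything.
ValidationUtils.checkForDuplicateUuids(new HashSet<UniqueEntity>(nodes))
    .ifPresent(duplicates -> System.err.println("Duplicate UUIDs:\n" + duplicates));
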
sptd.storageInput.node, sptd.storageInput.type, - sptd.storageInput.operator + sptd.storageInput.operator, + sptd.storageInput.node.operator ] sptd.hpInput || [ sptd.hpInput.node, sptd.hpInput.type, - sptd.hpInput.operator + sptd.hpInput.operator, + sptd.hpInput.thermalBus, + sptd.hpInput.thermalBus.operator, + sptd.hpInput.node.operator ] - gtd.lineGraphicCtoD || [gtd.lineGraphicCtoD.line] + gtd.lineGraphicCtoD || [ + gtd.lineGraphicCtoD.line, + gtd.lineGraphicCtoD.line.nodeB, + gtd.lineGraphicCtoD.line.nodeA, + gtd.lineGraphicCtoD.line.type, + gtd.lineGraphicCtoD.line.operator + ] gtd.nodeGraphicC || [gtd.nodeGraphicC.node] + new NodeGraphicInput( + gtd.nodeGraphicC.uuid, + gtd.nodeGraphicC.graphicLayer, + gtd.nodeGraphicC.path, + null, + gtd.nodeGraphicC.point + ) || [null] gtd.measurementUnitInput || [ gtd.measurementUnitInput.node, gtd.measurementUnitInput.operator ] - tutd.thermalBusInput || [ - tutd.thermalBusInput.operator + tutd.thermalBus || [ + tutd.thermalBus.operator ] tutd.cylindricStorageInput || [ tutd.cylindricStorageInput.operator, - tutd.cylindricStorageInput.bus + tutd.cylindricStorageInput.thermalBus, + tutd.cylindricStorageInput.thermalBus.operator ] tutd.thermalHouseInput || [ tutd.thermalHouseInput.operator, - tutd.thermalHouseInput.bus + tutd.thermalHouseInput.thermalBus, + tutd.thermalHouseInput.thermalBus.operator ] } @@ -122,4 +154,24 @@ class ExtractorTest extends Specification { ex.message == "Unable to extract entity of class 'InvalidNestedExtensionClass'. " + "Does this class implements NestedEntity and one of its sub-interfaces correctly?" } + + def "An Extractor should not extract an operator that is marked as not assigned"() { + given: + def sampleFixedFeedInput = new FixedFeedInInput(UUID.fromString("717af017-cc69-406f-b452-e022d7fb516a"), "test_fixedFeedInInput", + OperatorInput.NO_OPERATOR_ASSIGNED, + sptd.fixedFeedInInput.operationTime, sptd.fixedFeedInInput.node, sptd.fixedFeedInInput.qCharacteristics, + sptd.fixedFeedInInput.sRated,sptd.fixedFeedInInput.cosphiRated) + expect: + Extractor.extractElements(sampleFixedFeedInput) as Set == [ + sptd.fixedFeedInInput.node, + sptd.fixedFeedInInput.node.operator] as Set + } + + def "An Extractor should not extract an operator that is marked as not assigned and not throw an exception if the resulting list empty"() { + given: + def sampleNodeInput = gtd.nodeB + + expect: + Extractor.extractElements(sampleNodeInput) == [] as Set + } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactoryTest.groovy index 9ab7c9c3c..025673062 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/AssetInputEntityFactoryTest.groovy @@ -239,7 +239,7 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe } } - def "An AssetInputFactory should throw an exception on invalid or incomplete data"() { + def "An AssetInputFactory should throw an exception on invalid or incomplete data "() { given: "a system participant input type factory and model data" def inputFactory = new TestAssetInputFactory() Map parameter = [ @@ -254,8 +254,12 @@ class AssetInputEntityFactoryTest extends Specification implements FactoryTestHe then: FactoryException ex = thrown() - ex.message == "The provided fields [operatesfrom, operatesuntil, uuid] with data {operatesfrom -> 
2019-01-01T00:00:00+01:00[Europe/Berlin],operatesuntil -> 2019-12-31T00:00:00+01:00[Europe/Berlin],uuid -> 91ec3bcf-1777-4d38-af67-0bf7c9fa73c7} are invalid for instance of TestAssetInput. \n" + - "The following fields to be passed to a constructor of TestAssetInput are possible:\n" + + ex.message == + "The provided fields [operatesfrom, operatesuntil, uuid] with data \n" + + "{operatesfrom -> 2019-01-01T00:00:00+01:00[Europe/Berlin],\n" + + "operatesuntil -> 2019-12-31T00:00:00+01:00[Europe/Berlin],\n" + + "uuid -> 91ec3bcf-1777-4d38-af67-0bf7c9fa73c7} are invalid for instance of TestAssetInput. \n" + + "The following fields to be passed to a constructor of 'TestAssetInput' are possible (NOT case-sensitive!):\n" + "0: [id, uuid]\n" + "1: [id, operatesfrom, uuid]\n" + "2: [id, operatesuntil, uuid]\n" + diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactoryTest.groovy index f18d3e815..d27be440e 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/CylindricalStorageInputFactoryTest.groovy @@ -49,7 +49,7 @@ class CylindricalStorageInputFactoryTest extends Specification implements Facto assert operationTime == OperationTime.notLimited() assert operator == OperatorInput.NO_OPERATOR_ASSIGNED assert id == parameter["id"] - assert bus == thermalBusInput + assert thermalBus == thermalBusInput assert storageVolumeLvl == getQuant(parameter["storagevolumelvl"], StandardUnits.VOLUME) assert storageVolumeLvlMin == getQuant(parameter["storagevolumelvlmin"], StandardUnits.VOLUME) assert inletTemp == getQuant(parameter["inlettemp"], StandardUnits.TEMPERATURE) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy index 9d3275242..680be3bb0 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy @@ -54,7 +54,7 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { def typeInput = Mock(LineTypeInput) when: - Optional input = inputFactory.getEntity(new LineInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) + Optional input = inputFactory.getEntity(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) then: input.present @@ -69,7 +69,7 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { assert nodeA == nodeInputA assert nodeB == nodeInputB assert type == typeInput - assert noOfParallelDevices == Integer.parseInt(parameter["paralleldevices"]) + assert parallelDevices == Integer.parseInt(parameter["paralleldevices"]) assert length == getQuant(parameter["length"], StandardUnits.LINE_LENGTH) assert geoPosition == getGeometry(parameter["geoposition"]) olmCharacteristic.with { @@ -105,7 +105,7 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { def typeInput = Mock(LineTypeInput) when: - Optional input = inputFactory.getEntity(new LineInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) + Optional input = inputFactory.getEntity(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, 
typeInput)) then: input.present @@ -120,7 +120,7 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { assert nodeA == nodeInputA assert nodeB == nodeInputB assert type == typeInput - assert noOfParallelDevices == Integer.parseInt(parameter["paralleldevices"]) + assert parallelDevices == Integer.parseInt(parameter["paralleldevices"]) assert length == getQuant(parameter["length"], StandardUnits.LINE_LENGTH) assert geoPosition == getGeometry(parameter["geoposition"]) olmCharacteristic.with { diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactoryTest.groovy index d4b4a8809..a533d029d 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/MeasurementUnitInputFactoryTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input + import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.input.MeasurementUnitInput import edu.ie3.datamodel.models.input.NodeInput @@ -37,7 +38,7 @@ class MeasurementUnitInputFactoryTest extends Specification implements FactoryTe def nodeInput = Mock(NodeInput) when: - Optional input = inputFactory.getEntity(new MeasurementUnitInputEntityData(parameter, inputClass, nodeInput)) + Optional input = inputFactory.getEntity(new NodeAssetInputEntityData(parameter, inputClass, nodeInput)) then: input.present diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactoryTest.groovy index d6f093c22..836bb1482 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/ThermalHouseInputFactoryTest.groovy @@ -46,7 +46,7 @@ class ThermalHouseInputFactoryTest extends Specification implements FactoryTestH assert operationTime == OperationTime.notLimited() assert operator == OperatorInput.NO_OPERATOR_ASSIGNED assert id == parameter["id"] - assert bus == thermalBusInput + assert thermalBus == thermalBusInput assert ethLosses == getQuant(parameter["ethlosses"], StandardUnits.THERMAL_TRANSMISSION) assert ethCapa == getQuant(parameter["ethcapa"], StandardUnits.HEAT_CAPACITY) } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactoryTest.groovy index 85e0167f4..96e21a831 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer2WInputFactoryTest.groovy @@ -43,7 +43,7 @@ class Transformer2WInputFactoryTest extends Specification implements FactoryTest def typeInput = Mock(Transformer2WTypeInput) when: - Optional input = inputFactory.getEntity(new Transformer2WInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) + Optional input = inputFactory.getEntity(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) then: input.present @@ -58,7 +58,7 @@ class Transformer2WInputFactoryTest extends Specification implements FactoryTest assert nodeA == nodeInputA assert nodeB == nodeInputB assert type == typeInput - assert noOfParallelDevices == 
Integer.parseInt(parameter["paralleldevices"]) + assert parallelDevices == Integer.parseInt(parameter["paralleldevices"]) assert tapPos == Integer.parseInt(parameter["tappos"]) assert autoTap } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer3WInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer3WInputFactoryTest.groovy index 85ee116b7..5e2740c54 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer3WInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/Transformer3WInputFactoryTest.groovy @@ -54,7 +54,7 @@ class Transformer3WInputFactoryTest extends Specification implements FactoryTes assert nodeB == nodeInputB assert nodeC == nodeInputC assert type == typeInput - assert noOfParallelDevices == Integer.parseInt(parameter["paralleldevices"]) + assert parallelDevices == Integer.parseInt(parameter["paralleldevices"]) assert tapPos == Integer.parseInt(parameter["tappos"]) assert autoTap } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvInputFactoryTest.groovy index 5af8a526c..8f9407565 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/EvInputFactoryTest.groovy @@ -46,7 +46,7 @@ class EvInputFactoryTest extends Specification implements FactoryTestHelper { when: Optional input = inputFactory.getEntity( - new SystemParticipantTypedEntityData(parameter, inputClass,operatorInput, nodeInput, typeInput)) + new SystemParticipantTypedEntityData(parameter, inputClass, operatorInput, nodeInput, typeInput)) then: input.present diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactoryTest.groovy index 528f4b55d..35de0102d 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/FixedFeedInInputFactoryTest.groovy @@ -8,6 +8,7 @@ package edu.ie3.datamodel.io.factory.input.participant import static edu.ie3.util.quantities.PowerSystemUnits.PU import edu.ie3.datamodel.exceptions.FactoryException +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput @@ -47,7 +48,7 @@ class FixedFeedInInputFactoryTest extends Specification implements FactoryTestHe def operatorInput = Mock(OperatorInput) when: - Optional input = inputFactory.getEntity(new SystemParticipantEntityData(parameter, inputClass, operatorInput, nodeInput)) + Optional input = inputFactory.getEntity(new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: input.present @@ -84,12 +85,16 @@ class FixedFeedInInputFactoryTest extends Specification implements FactoryTestHe def nodeInput = Mock(NodeInput) when: - inputFactory.getEntity(new SystemParticipantEntityData(parameter, inputClass, nodeInput)) + inputFactory.getEntity(new NodeAssetInputEntityData(parameter, inputClass, nodeInput)) then: FactoryException ex = thrown() - ex.message == "The provided fields [cosphirated, id, srated, uuid] with data {cosphirated -> 4,id -> TestID,srated -> 3,uuid -> 
91ec3bcf-1777-4d38-af67-0bf7c9fa73c7} are invalid for instance of FixedFeedInInput. \n" + - "The following fields to be passed to a constructor of FixedFeedInInput are possible:\n" + + ex.message == "The provided fields [cosphirated, id, srated, uuid] with data \n" + + "{cosphirated -> 4,\n" + + "id -> TestID,\n" + + "srated -> 3,\n" + + "uuid -> 91ec3bcf-1777-4d38-af67-0bf7c9fa73c7} are invalid for instance of FixedFeedInInput. \n" + + "The following fields to be passed to a constructor of 'FixedFeedInInput' are possible (NOT case-sensitive!):\n" + "0: [cosphirated, id, qcharacteristics, srated, uuid]\n" + "1: [cosphirated, id, operatesfrom, qcharacteristics, srated, uuid]\n" + "2: [cosphirated, id, operatesuntil, qcharacteristics, srated, uuid]\n" + diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactoryTest.groovy index 0b43a3efc..accb49a21 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/LoadInputFactoryTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData import static edu.ie3.util.quantities.PowerSystemUnits.PU import edu.ie3.datamodel.models.BdewLoadProfile @@ -34,21 +35,21 @@ class LoadInputFactoryTest extends Specification implements FactoryTestHelper { given: "a system participant input type factory and model data" def inputFactory = new LoadInputFactory() Map parameter = [ - "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "id" : "TestID", - "qcharacteristics": "cosPhiFixed:{(0.0,1.0)}", - "slp" : "G-4", - "dsm" : "true", - "econsannual" : "3", - "srated" : "4", - "cosphi" : "5" + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "TestID", + "qcharacteristics" : "cosPhiFixed:{(0.0,1.0)}", + "standardloadprofile": "G-4", + "dsm" : "true", + "econsannual" : "3", + "srated" : "4", + "cosphirated" : "5" ] def inputClass = LoadInput def nodeInput = Mock(NodeInput) when: Optional input = inputFactory.getEntity( - new SystemParticipantEntityData(parameter, inputClass, nodeInput)) + new NodeAssetInputEntityData(parameter, inputClass, nodeInput)) then: input.present @@ -69,7 +70,7 @@ class LoadInputFactoryTest extends Specification implements FactoryTestHelper { assert dsm assert eConsAnnual == getQuant(parameter["econsannual"], StandardUnits.ENERGY_IN) assert sRated == getQuant(parameter["srated"], StandardUnits.S_RATED) - assert cosphiRated == Double.parseDouble(parameter["cosphi"]) + assert cosphiRated == Double.parseDouble(parameter["cosphirated"]) } } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/PvInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/PvInputFactoryTest.groovy index 1e157ed0a..abc1395ba 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/PvInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/participant/PvInputFactoryTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.factory.input.participant +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData import static edu.ie3.util.quantities.PowerSystemUnits.PU import edu.ie3.datamodel.models.StandardUnits @@ -46,7 +47,7 @@ class PvInputFactoryTest extends Specification implements FactoryTestHelper { "kt" : "8", "marketreaction" 
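With the switch to the generic NodeAssetInputEntityData, the factories can be fed the same way outside of tests. A sketch with hypothetical node and operator inputs; the field keys follow the case-insensitive constructor field sets listed in the error message above, and the Optional return type of getEntity is assumed.

// Hypothetical field data for a FixedFeedInInput (field set 0 from the message above).
Map<String, String> fields = new HashMap<>();
fields.put("uuid", "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7");
fields.put("id", "fixed_feed_in_1");
fields.put("qcharacteristics", "cosPhiFixed:{(0.0,1.0)}");
fields.put("srated", "10.0");
fields.put("cosphirated", "0.95");

FixedFeedInInputFactory factory = new FixedFeedInInputFactory();
Optional<FixedFeedInInput> result =
    factory.getEntity(new NodeAssetInputEntityData(fields, FixedFeedInInput.class, operator, node));
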
: "true", "srated" : "9", - "cosphi" : "10", + "cosphirated" : "10", ] def inputClass = PvInput def nodeInput = Mock(NodeInput) @@ -54,7 +55,7 @@ class PvInputFactoryTest extends Specification implements FactoryTestHelper { when: Optional input = inputFactory.getEntity( - new SystemParticipantEntityData(parameter, inputClass, operatorInput, nodeInput)) + new NodeAssetInputEntityData(parameter, inputClass, operatorInput, nodeInput)) then: input.present @@ -82,7 +83,7 @@ class PvInputFactoryTest extends Specification implements FactoryTestHelper { assert kT == Double.parseDouble(parameter["kt"]) assert marketReaction assert sRated == getQuant(parameter["srated"], StandardUnits.S_RATED) - assert cosphiRated == Double.parseDouble(parameter["cosphi"]) + assert cosphiRated == Double.parseDouble(parameter["cosphirated"]) } } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/result/NodeResultFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/result/NodeResultFactoryTest.groovy index d81c5924b..7867bb229 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/result/NodeResultFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/result/NodeResultFactoryTest.groovy @@ -62,8 +62,11 @@ class NodeResultFactoryTest extends Specification implements FactoryTestHelper { then: FactoryException ex = thrown() - ex.message == "The provided fields [inputModel, timestamp, vmag] with data {inputModel -> 91ec3bcf-1897-4d38-af67-0bf7c9fa73c7,timestamp -> 2020-01-30 17:26:44,vmag -> 2} are invalid for instance of NodeResult. \n" + - "The following fields to be passed to a constructor of NodeResult are possible:\n" + + ex.message == "The provided fields [inputModel, timestamp, vmag] with data \n" + + "{inputModel -> 91ec3bcf-1897-4d38-af67-0bf7c9fa73c7,\n" + + "timestamp -> 2020-01-30 17:26:44,\n" + + "vmag -> 2} are invalid for instance of NodeResult. \n" + + "The following fields to be passed to a constructor of 'NodeResult' are possible (NOT case-sensitive!):\n" + "0: [inputModel, timestamp, vang, vmag]\n" + "1: [inputModel, timestamp, uuid, vang, vmag]\n" } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/result/SystemParticipantResultFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/result/SystemParticipantResultFactoryTest.groovy index 9f2cfa5ac..a8acc253f 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/result/SystemParticipantResultFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/result/SystemParticipantResultFactoryTest.groovy @@ -116,8 +116,11 @@ class SystemParticipantResultFactoryTest extends Specification implements Factor then: FactoryException ex = thrown() - ex.message == "The provided fields [inputModel, q, timestamp] with data {inputModel -> 91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,q -> 2,timestamp -> 2020-01-30 17:26:44} are invalid for instance of WecResult. \n" + - "The following fields to be passed to a constructor of WecResult are possible:\n" + + ex.message == "The provided fields [inputModel, q, timestamp] with data \n" + + "{inputModel -> 91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,\n" + + "q -> 2,\n" + + "timestamp -> 2020-01-30 17:26:44} are invalid for instance of WecResult. 
\n" + + "The following fields to be passed to a constructor of 'WecResult' are possible (NOT case-sensitive!):\n" + "0: [inputModel, p, q, timestamp]\n" + "1: [inputModel, p, q, timestamp, uuid]\n" } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactoryTest.groovy index 42ab3d6e2..e9a0afa68 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/typeinput/SystemParticipantTypeInputFactoryTest.groovy @@ -47,7 +47,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac "capex": "3", "opex": "4", "srated": "5", - "cosphi": "6", + "cosphirated": "6", "estorage": "7", "econs": "8", @@ -67,7 +67,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) assert opex == getQuant(parameter["opex"], StandardUnits.ENERGY_PRICE) assert sRated == getQuant(parameter["srated"], StandardUnits.S_RATED) - assert cosphiRated == Double.parseDouble(parameter["cosphi"]) + assert cosphiRated == Double.parseDouble(parameter["cosphirated"]) assert eStorage == getQuant(parameter["estorage"], StandardUnits.ENERGY_IN) assert eCons == getQuant(parameter["econs"], StandardUnits.ENERGY_PER_DISTANCE) @@ -83,7 +83,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac "capex": "3", "opex": "4", "srated": "5", - "cosphi": "6", + "cosphirated": "6", "pthermal": "7", ] @@ -102,7 +102,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) assert opex == getQuant(parameter["opex"], StandardUnits.ENERGY_PRICE) assert sRated == getQuant(parameter["srated"], StandardUnits.S_RATED) - assert cosphiRated == Double.parseDouble(parameter["cosphi"]) + assert cosphiRated == Double.parseDouble(parameter["cosphirated"]) assert pThermal == getQuant(parameter["pthermal"], StandardUnits.ACTIVE_POWER_IN) } @@ -117,7 +117,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac "capex": "3", "opex": "4", "srated": "5", - "cosphi": "6", + "cosphirated": "6", "activepowergradient": "7", "etaconv": "8" ] @@ -136,7 +136,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) assert opex == getQuant(parameter["opex"], StandardUnits.ENERGY_PRICE) assert sRated == getQuant(parameter["srated"], StandardUnits.S_RATED) - assert cosphiRated == Double.parseDouble(parameter["cosphi"]) + assert cosphiRated == Double.parseDouble(parameter["cosphirated"]) assert activePowerGradient == getQuant(parameter["activepowergradient"], StandardUnits.ACTIVE_POWER_GRADIENT) assert etaConv == getQuant(parameter["etaconv"], StandardUnits.EFFICIENCY) @@ -152,7 +152,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac "capex": "3", "opex": "4", "srated": "5", - "cosphi": "6", + "cosphirated": "6", "cpCharacteristic": "cP:{(10.00,0.05),(15.00,0.10),(20.00,0.20)}", "etaconv": "7", @@ -174,7 +174,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) assert opex == getQuant(parameter["opex"], StandardUnits.ENERGY_PRICE) assert 
sRated == getQuant(parameter["srated"], StandardUnits.S_RATED) - assert cosphiRated == Double.parseDouble(parameter["cosphi"]) + assert cosphiRated == Double.parseDouble(parameter["cosphirated"]) cpCharacteristic.with { assert uuid != null @@ -199,7 +199,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac "capex": "3", "opex": "4", "srated": "5", - "cosphi": "6", + "cosphirated": "6", "etael": "7", "etathermal": "8", @@ -221,7 +221,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) assert opex == getQuant(parameter["opex"], StandardUnits.ENERGY_PRICE) assert sRated == getQuant(parameter["srated"], StandardUnits.S_RATED) - assert cosphiRated == Double.parseDouble(parameter["cosphi"]) + assert cosphiRated == Double.parseDouble(parameter["cosphirated"]) assert etaEl == getQuant(parameter["etael"], StandardUnits.EFFICIENCY) assert etaThermal == getQuant(parameter["etathermal"], StandardUnits.EFFICIENCY) @@ -239,7 +239,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac "capex" : "3", "opex" : "4", "srated" : "5", - "cosphi" : "6", + "cosphirated" : "6", "estorage" : "6", "pmax" : "8", @@ -264,7 +264,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac assert capex == getQuant(parameter["capex"], StandardUnits.CAPEX) assert opex == getQuant(parameter["opex"], StandardUnits.ENERGY_PRICE) assert sRated == getQuant(parameter["srated"], StandardUnits.S_RATED) - assert cosphiRated == Double.parseDouble(parameter["cosphi"]) + assert cosphiRated == Double.parseDouble(parameter["cosphirated"]) assert eStorage == getQuant(parameter["estorage"], StandardUnits.ENERGY_IN) assert pMax == getQuant(parameter["pmax"], StandardUnits.ACTIVE_POWER_IN) @@ -285,8 +285,7 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac "capex": "3", "opex": "4", "srated": "5", - "cosphi": "6", - + "cosphirated": "6", "estorage": "6", "pmin": "7", "pmax": "8", @@ -300,8 +299,20 @@ class SystemParticipantTypeInputFactoryTest extends Specification implements Fac then: FactoryException ex = thrown() - ex.message == "The provided fields [capex, cosphi, dod, estorage, eta, id, lifetime, opex, pmax, pmin, srated, uuid] with data {capex -> 3,cosphi -> 6,dod -> 10,estorage -> 6,eta -> 9,id -> blablub,lifetime -> 11,opex -> 4,pmax -> 8,pmin -> 7,srated -> 5,uuid -> 91ec3bcf-1777-4d38-af67-0bf7c9fa73c7} are invalid for instance of StorageTypeInput. \n" + - "The following fields to be passed to a constructor of StorageTypeInput are possible:\n" + - "0: [activepowergradient, capex, cosphi, dod, estorage, eta, id, lifecycle, lifetime, opex, pmax, srated, uuid]\n" + ex.message == "The provided fields [capex, cosphirated, dod, estorage, eta, id, lifetime, opex, pmax, pmin, srated, uuid] with data \n" + + "{capex -> 3,\n" + + "cosphirated -> 6,\n" + + "dod -> 10,\n" + + "estorage -> 6,\n" + + "eta -> 9,\n" + + "id -> blablub,\n" + + "lifetime -> 11,\n" + + "opex -> 4,\n" + + "pmax -> 8,\n" + + "pmin -> 7,\n" + + "srated -> 5,\n" + + "uuid -> 91ec3bcf-1777-4d38-af67-0bf7c9fa73c7} are invalid for instance of StorageTypeInput. 
\n" + + "The following fields to be passed to a constructor of 'StorageTypeInput' are possible (NOT case-sensitive!):\n" + + "0: [activepowergradient, capex, cosphirated, dod, estorage, eta, id, lifecycle, lifetime, opex, pmax, srated, uuid]\n" } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy index 8cf9bc972..7fbb5582e 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy @@ -242,7 +242,7 @@ class ProcessorProviderTest extends Specification implements TimeSeriesTestData "inputModel": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", "p" : "0.01", "q" : "0.01", - "timestamp" : "2020-01-30 17:26:44"] + "timestamp" : "2020-01-30T17:26:44Z[UTC]"] when: UUID uuid = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy index 6cdee606d..ccbb70f51 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.processor.input +import edu.ie3.datamodel.models.OperationTime import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.NodeInput import edu.ie3.datamodel.models.input.OperatorInput @@ -33,15 +34,19 @@ import edu.ie3.datamodel.models.input.system.type.EvTypeInput import edu.ie3.datamodel.models.input.system.type.HpTypeInput import edu.ie3.datamodel.models.input.system.type.StorageTypeInput import edu.ie3.datamodel.models.input.system.type.WecTypeInput +import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils import edu.ie3.test.common.GridTestData import edu.ie3.test.common.SystemParticipantTestData import edu.ie3.test.common.TypeTestData import edu.ie3.util.TimeTools import spock.lang.Specification +import tec.uom.se.quantity.Quantities import java.time.ZoneId import java.time.ZonedDateTime +import static edu.ie3.util.quantities.PowerSystemUnits.PU + /** * Testing the function of processors * @@ -59,12 +64,12 @@ class InputEntityProcessorTest extends Specification { def validResult = GridTestData.nodeA Map expectedResults = [ - "uuid" : "5dc88077-aeb6-4711-9142-db57292640b1", + "uuid" : "4ca90220-74c2-4369-9afa-a18bf068840d", "geoPosition" : "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "id" : "node_a", - "operatesUntil": "2020-03-25 15:11:31", - "operatesFrom" : "2020-03-24 15:11:31", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesUntil": "2020-03-25T15:11:31Z[UTC]", + "operatesFrom" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "f15105c4-a2de-4ab8-a621-4bc98e372d92", "slack" : "true", "subnet" : "1", "vTarget" : "1.0", @@ -92,66 +97,62 @@ class InputEntityProcessorTest extends Specification { then: "make sure that the result is as expected " processingResult.present - processingResult.get().forEach { k, v -> - if (k != "nodeInternal") // the internal 3w node is always randomly generated, hence we can skip to test on this - assert (v == expectedResult.get(k)) - } + processingResult.get() == expectedResult where: modelClass | modelInstance || expectedResult 
Transformer3WInput | GridTestData.transformerAtoBtoC || [ - "uuid" : "5dc88077-aeb6-4711-9142-db57292640b1", - "autoTap" : "true", - "id" : "3w_test", - "noOfParallelDevices": "1", - "nodeA" : "5dc88077-aeb6-4711-9142-db57292640b1", - "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", - "nodeC" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", - "operatesUntil" : "2020-03-25 15:11:31", - "operatesFrom" : "2020-03-24 15:11:31", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", - "tapPos" : "0", - "type" : "5b0ee546-21fb-4a7f-a801-5dbd3d7bb356" + "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", + "autoTap" : "true", + "id" : "3w_test", + "parallelDevices": "1", + "nodeA" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", + "nodeC" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", + "operatesUntil" : "2020-03-25T15:11:31Z[UTC]", + "operatesFrom" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "f15105c4-a2de-4ab8-a621-4bc98e372d92", + "tapPos" : "0", + "type" : "5b0ee546-21fb-4a7f-a801-5dbd3d7bb356" ] Transformer2WInput | GridTestData.transformerCtoG || [ - "uuid" : "5dc88077-aeb6-4711-9142-db57292640b1", - "autoTap" : "true", - "id" : "2w_parallel_2", - "noOfParallelDevices": "1", - "nodeA" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", - "nodeB" : "aaa74c1a-d07e-4615-99a5-e991f1d81cc4", - "operatesUntil" : "2020-03-25 15:11:31", - "operatesFrom" : "2020-03-24 15:11:31", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", - "tapPos" : "0", - "type" : "08559390-d7c0-4427-a2dc-97ba312ae0ac" + "uuid" : "5dc88077-aeb6-4711-9142-db57292640b1", + "autoTap" : "true", + "id" : "2w_parallel_2", + "parallelDevices": "1", + "nodeA" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", + "nodeB" : "aaa74c1a-d07e-4615-99a5-e991f1d81cc4", + "operatesUntil" : "2020-03-25T15:11:31Z[UTC]", + "operatesFrom" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "f15105c4-a2de-4ab8-a621-4bc98e372d92", + "tapPos" : "0", + "type" : "08559390-d7c0-4427-a2dc-97ba312ae0ac" ] SwitchInput | GridTestData.switchAtoB || [ - "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", - "closed" : "true", - "id" : "test_switch_AtoB", - "noOfParallelDevices": "1", - "nodeA" : "5dc88077-aeb6-4711-9142-db57292640b1", - "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", - "operatesUntil" : "2020-03-25 15:11:31", - "operatesFrom" : "2020-03-24 15:11:31", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510" + "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", + "closed" : "true", + "id" : "test_switch_AtoB", + "nodeA" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", + "operatesUntil": "2020-03-25T15:11:31Z[UTC]", + "operatesFrom" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "f15105c4-a2de-4ab8-a621-4bc98e372d92" ] LineInput | GridTestData.lineCtoD || [ - "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "geoPosition" : "{\"type\":\"LineString\",\"coordinates\":[[7.411111,51.492528],[7.414116,51.484136]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", - "id" : "test_line_AtoB", - "length" : "0.003", - "noOfParallelDevices": "2", - "nodeA" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", - "nodeB" : "6e0980e0-10f2-4e18-862b-eb2b7c90509b", - "olmCharacteristic" : "olm:{(0.00,1.00)}", - "operatesUntil" : "2020-03-25 15:11:31", - "operatesFrom" : "2020-03-24 15:11:31", - "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", - "type" : "3bed3eb3-9790-4874-89b5-a5434d408088" + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "geoPosition" : 
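The expected timestamps change from the former "yyyy-MM-dd HH:mm:ss" rendering to the default ZonedDateTime representation. A quick, self-contained check of what that default looks like:

import java.time.ZoneId;
import java.time.ZonedDateTime;

public class TimestampFormatCheck {
  public static void main(String[] args) {
    ZonedDateTime time = ZonedDateTime.of(2020, 3, 24, 15, 11, 31, 0, ZoneId.of("UTC"));
    // ZonedDateTime#toString produces the ISO-8601 form plus the zone id,
    // matching the expected values in the processor tests above
    System.out.println(time); // 2020-03-24T15:11:31Z[UTC]
    // the representation round-trips through parse()
    System.out.println(ZonedDateTime.parse(time.toString()).equals(time)); // true
  }
}
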
"{\"type\":\"LineString\",\"coordinates\":[[7.411111,51.492528],[7.414116,51.484136]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "id" : "test_line_CtoD", + "length" : "0.003", + "parallelDevices" : "2", + "nodeA" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", + "nodeB" : "6e0980e0-10f2-4e18-862b-eb2b7c90509b", + "olmCharacteristic": "olm:{(0.00,1.00)}", + "operatesUntil" : "2020-03-25T15:11:31Z[UTC]", + "operatesFrom" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "f15105c4-a2de-4ab8-a621-4bc98e372d92", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088" ] } @@ -178,8 +179,8 @@ class InputEntityProcessorTest extends Specification { "cosphiRated" : SystemParticipantTestData.fixedFeedInInput.cosphiRated.toString(), "id" : SystemParticipantTestData.fixedFeedInInput.id, "node" : SystemParticipantTestData.fixedFeedInInput.node.uuid.toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.fixedFeedInInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : TimeTools.toString(SystemParticipantTestData.fixedFeedInInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.fixedFeedInInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.fixedFeedInInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.fixedFeedInInput.operator.getUuid().toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, "sRated" : SystemParticipantTestData.fixedFeedInInput.sRated.to(StandardUnits.S_RATED).getValue().doubleValue().toString() @@ -196,8 +197,8 @@ class InputEntityProcessorTest extends Specification { "kT" : SystemParticipantTestData.pvInput.kT.toString(), "marketReaction" : SystemParticipantTestData.pvInput.marketReaction.toString(), "node" : SystemParticipantTestData.pvInput.node.uuid.toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.pvInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : TimeTools.toString(SystemParticipantTestData.pvInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.pvInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.pvInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.pvInput.operator.getUuid().toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, "sRated" : SystemParticipantTestData.pvInput.sRated.to(StandardUnits.S_RATED).getValue().doubleValue().toString() @@ -207,8 +208,8 @@ class InputEntityProcessorTest extends Specification { "id" : SystemParticipantTestData.wecInput.id, "marketReaction" : SystemParticipantTestData.wecInput.marketReaction.toString(), "node" : SystemParticipantTestData.wecInput.node.uuid.toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.wecInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : TimeTools.toString(SystemParticipantTestData.wecInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.wecInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.wecInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.wecInput.operator.getUuid().toString(), "qCharacteristics": 
SystemParticipantTestData.cosPhiPDeSerialized, "type" : SystemParticipantTestData.wecInput.type.getUuid().toString() @@ -218,8 +219,8 @@ class InputEntityProcessorTest extends Specification { "id" : SystemParticipantTestData.chpInput.id, "marketReaction" : SystemParticipantTestData.chpInput.marketReaction.toString(), "node" : SystemParticipantTestData.chpInput.node.getUuid().toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.chpInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : TimeTools.toString(SystemParticipantTestData.chpInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.chpInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.chpInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.chpInput.operator.getUuid().toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, "thermalBus" : SystemParticipantTestData.chpInput.thermalBus.getUuid().toString(), @@ -233,8 +234,8 @@ class InputEntityProcessorTest extends Specification { "id" : SystemParticipantTestData.bmInput.id, "marketReaction" : SystemParticipantTestData.bmInput.marketReaction.toString(), "node" : SystemParticipantTestData.bmInput.node.uuid.toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.bmInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : TimeTools.toString(SystemParticipantTestData.bmInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.bmInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.bmInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.bmInput.operator.getUuid().toString(), "qCharacteristics": SystemParticipantTestData.qVDeSerialized, "type" : SystemParticipantTestData.bmInput.type.getUuid().toString() @@ -243,8 +244,8 @@ class InputEntityProcessorTest extends Specification { "uuid" : SystemParticipantTestData.evInput.uuid.toString(), "id" : SystemParticipantTestData.evInput.id, "node" : SystemParticipantTestData.evInput.node.uuid.toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.evInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : TimeTools.toString(SystemParticipantTestData.evInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.evInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.evInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.evInput.operator.getUuid().toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, "type" : SystemParticipantTestData.evInput.type.getUuid().toString() @@ -257,8 +258,8 @@ class InputEntityProcessorTest extends Specification { "eConsAnnual" : SystemParticipantTestData.loadInput.eConsAnnual.getValue().doubleValue().toString(), "id" : SystemParticipantTestData.loadInput.id, "node" : SystemParticipantTestData.loadInput.node.uuid.toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.loadInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : 
TimeTools.toString(SystemParticipantTestData.loadInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.loadInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.loadInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.loadInput.operator.getUuid().toString(), "qCharacteristics" : SystemParticipantTestData.cosPhiFixedDeSerialized, "sRated" : SystemParticipantTestData.loadInput.sRated.getValue().doubleValue().toString(), @@ -269,8 +270,8 @@ class InputEntityProcessorTest extends Specification { "behaviour" : SystemParticipantTestData.storageInput.behaviour.token, "id" : SystemParticipantTestData.storageInput.id, "node" : SystemParticipantTestData.storageInput.node.uuid.toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.storageInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : TimeTools.toString(SystemParticipantTestData.storageInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.storageInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.storageInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.storageInput.operator.getUuid().toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, "type" : SystemParticipantTestData.storageInput.type.getUuid().toString() @@ -279,8 +280,8 @@ class InputEntityProcessorTest extends Specification { "uuid" : SystemParticipantTestData.hpInput.uuid.toString(), "id" : SystemParticipantTestData.hpInput.id, "node" : SystemParticipantTestData.hpInput.node.uuid.toString(), - "operatesUntil" : TimeTools.toString(SystemParticipantTestData.hpInput.operationTime.endDate.orElse(ZonedDateTime.now())), - "operatesFrom" : TimeTools.toString(SystemParticipantTestData.hpInput.operationTime.startDate.orElse(ZonedDateTime.now())), + "operatesUntil" : SystemParticipantTestData.hpInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), + "operatesFrom" : SystemParticipantTestData.hpInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), "operator" : SystemParticipantTestData.hpInput.operator.getUuid().toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, "thermalBus" : SystemParticipantTestData.hpInput.thermalBus.uuid.toString(), @@ -293,11 +294,11 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(NodeGraphicInput) NodeGraphicInput validNode = GridTestData.nodeGraphicC Map expected = [ - "uuid" : "09aec636-791b-45aa-b981-b14edf171c4c", - "graphicLayer" : "main", - "path" : "", - "point" : "{\"type\":\"Point\",\"coordinates\":[0.0,10],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", - "node" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2" + "uuid" : "09aec636-791b-45aa-b981-b14edf171c4c", + "graphicLayer": "main", + "path" : "", + "point" : "{\"type\":\"Point\",\"coordinates\":[0.0,10],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2" ] when: @@ -313,11 +314,11 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(NodeGraphicInput) NodeGraphicInput validNode = GridTestData.nodeGraphicD Map expected = [ - "uuid" : 
"9ecad435-bd16-4797-a732-762c09d4af25", - "graphicLayer" : "main", - "path" : "{\"type\":\"LineString\",\"coordinates\":[[-1,0.0],[1,0.0]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", - "point" : "", - "node" : "6e0980e0-10f2-4e18-862b-eb2b7c90509b" + "uuid" : "9ecad435-bd16-4797-a732-762c09d4af25", + "graphicLayer": "main", + "path" : "{\"type\":\"LineString\",\"coordinates\":[[-1,0.0],[1,0.0]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "point" : "", + "node" : "6e0980e0-10f2-4e18-862b-eb2b7c90509b" ] when: @@ -333,10 +334,10 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(LineGraphicInput) LineGraphicInput validNode = GridTestData.lineGraphicCtoD Map expected = [ - "uuid" : "ece86139-3238-4a35-9361-457ecb4258b0", - "graphicLayer" : "main", - "path" : "{\"type\":\"LineString\",\"coordinates\":[[0.0,0.0],[0.0,10]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", - "line" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7" + "uuid" : "ece86139-3238-4a35-9361-457ecb4258b0", + "graphicLayer": "main", + "path" : "{\"type\":\"LineString\",\"coordinates\":[[0.0,0.0],[0.0,10]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "line" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7" ] when: @@ -352,8 +353,8 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(OperatorInput) OperatorInput operator = new OperatorInput(UUID.fromString("420ee39c-dd5a-4d9c-9156-23dbdef13e5e"), "Prof. Brokkoli") Map expected = [ - "uuid" : "420ee39c-dd5a-4d9c-9156-23dbdef13e5e", - "id" : "Prof. Brokkoli" + "uuid": "420ee39c-dd5a-4d9c-9156-23dbdef13e5e", + "id" : "Prof. Brokkoli" ] when: @@ -381,17 +382,17 @@ class InputEntityProcessorTest extends Specification { 9.10 ) Map expected = [ - "uuid" : "a5b0f432-27b5-4b3e-b87a-61867b9edd79", - "quarterHour" : "4", - "kWd" : "1.2", - "kSa" : "2.3", - "kSu" : "3.4", - "myWd" : "4.5", - "mySa" : "5.6", - "mySu" : "6.7", - "sigmaWd" : "7.8", - "sigmaSa" : "8.9", - "sigmaSu" : "9.1" + "uuid" : "a5b0f432-27b5-4b3e-b87a-61867b9edd79", + "quarterHour": "4", + "kWd" : "1.2", + "kSa" : "2.3", + "kSu" : "3.4", + "myWd" : "4.5", + "mySa" : "5.6", + "mySu" : "6.7", + "sigmaWd" : "7.8", + "sigmaSa" : "8.9", + "sigmaSu" : "9.1" ] when: @@ -407,16 +408,16 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(WecTypeInput) WecTypeInput type = TypeTestData.wecType Map expected = [ - "uuid" : "a24fc5b9-a26f-44de-96b8-c9f50b665cb3", - "id" : "Test wec type", - "capex" : "100.0", - "opex" : "101.0", - "cosphiRated" : "0.95", - "cpCharacteristic" : "cP:{(10.00,0.05),(15.00,0.10),(20.00,0.20)}", - "etaConv" : "90.0", - "sRated" : "2500.0", - "rotorArea" : "2000.0", - "hubHeight" : "130.0" + "uuid" : "a24fc5b9-a26f-44de-96b8-c9f50b665cb3", + "id" : "Test wec type", + "capex" : "100.0", + "opex" : "101.0", + "cosphiRated" : "0.95", + "cpCharacteristic": "cP:{(10.00,0.05),(15.00,0.10),(20.00,0.20)}", + "etaConv" : "90.0", + "sRated" : "2500.0", + "rotorArea" : "2000.0", + "hubHeight" : "130.0" ] when: @@ -432,21 +433,21 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(Transformer2WTypeInput) Transformer2WTypeInput type = GridTestData.transformerTypeBtoD Map expected = [ - "uuid" : "202069a7-bcf8-422c-837c-273575220c8a", - "id" : "HS-MS_1", - "rSc" : "45.375", - "xSc" 
: "102.759", - "gM" : "0.0", - "bM" : "0.0", - "sRated" : "20000.0", - "vRatedA" : "110.0", - "vRatedB" : "20.0", - "dV" : "1.5", - "dPhi" : "0.0", - "tapSide" : "false", - "tapNeutr" : "0", - "tapMax" : "10", - "tapMin" : "-10" + "uuid" : "202069a7-bcf8-422c-837c-273575220c8a", + "id" : "HS-MS_1", + "rSc" : "45.375", + "xSc" : "102.759", + "gM" : "0.0", + "bM" : "0.0", + "sRated" : "20000.0", + "vRatedA" : "110.0", + "vRatedB" : "20.0", + "dV" : "1.5", + "dPhi" : "0.0", + "tapSide" : "false", + "tapNeutr": "0", + "tapMax" : "10", + "tapMin" : "-10" ] when: @@ -462,27 +463,27 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(Transformer3WTypeInput) Transformer3WTypeInput type = GridTestData.transformerTypeAtoBtoC Map expected = [ - "uuid" : "5b0ee546-21fb-4a7f-a801-5dbd3d7bb356", - "id" : "HöS-HS-MS_1", - "sRatedA" : "120000.0", - "sRatedB" : "60000.0", - "sRatedC" : "40000.0", - "vRatedA" : "380.0", - "vRatedB" : "110.0", - "vRatedC" : "20.0", - "rScA" : "0.3", - "rScB" : "0.025", - "rScC" : "8.0E-4", - "xScA" : "1.0", - "xScB" : "0.08", - "xScC" : "0.003", - "gM" : "40000.0", - "bM" : "1000.0", - "dV" : "1.5", - "dPhi" : "0.0", - "tapNeutr" : "0", - "tapMin" : "-10", - "tapMax" : "10" + "uuid" : "5b0ee546-21fb-4a7f-a801-5dbd3d7bb356", + "id" : "HöS-HS-MS_1", + "sRatedA" : "120000.0", + "sRatedB" : "60000.0", + "sRatedC" : "40000.0", + "vRatedA" : "380.0", + "vRatedB" : "110.0", + "vRatedC" : "20.0", + "rScA" : "0.3", + "rScB" : "0.025", + "rScC" : "8.0E-4", + "xScA" : "1.0", + "xScB" : "0.08", + "xScC" : "0.003", + "gM" : "40000.0", + "bM" : "1000.0", + "dV" : "1.5", + "dPhi" : "0.0", + "tapNeutr": "0", + "tapMin" : "-10", + "tapMax" : "10" ] when: @@ -521,14 +522,14 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(EvTypeInput) EvTypeInput type = TypeTestData.evType Map expected = [ - "uuid" : "66b0db5d-b2fb-41d0-a9bc-990d6b6a36db", - "id" : "ev type", - "capex" : "100.0", - "opex" : "101.0", - "eStorage" : "100.0", - "eCons" : "23.0", - "sRated" : "22.0", - "cosphiRated" : "0.9" + "uuid" : "66b0db5d-b2fb-41d0-a9bc-990d6b6a36db", + "id" : "ev type", + "capex" : "100.0", + "opex" : "101.0", + "eStorage" : "100.0", + "eCons" : "23.0", + "sRated" : "22.0", + "cosphiRated": "0.9" ] when: @@ -544,16 +545,16 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(ChpTypeInput) ChpTypeInput type = TypeTestData.chpType Map expected = [ - "uuid" : "1c027d3e-5409-4e52-a0e2-f8a23d5d0af0", - "id" : "chp type", - "capex" : "100.0", - "opex" : "101.0", - "etaEl" : "95.0", - "etaThermal" : "90.0", - "sRated" : "58.0", - "cosphiRated" : "0.98", - "pThermal" : "49.59", - "pOwn" : "5.0" + "uuid" : "1c027d3e-5409-4e52-a0e2-f8a23d5d0af0", + "id" : "chp type", + "capex" : "100.0", + "opex" : "101.0", + "etaEl" : "95.0", + "etaThermal" : "90.0", + "sRated" : "58.0", + "cosphiRated": "0.98", + "pThermal" : "49.59", + "pOwn" : "5.0" ] when: @@ -569,13 +570,13 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(HpTypeInput) HpTypeInput type = TypeTestData.hpType Map expected = [ - "uuid" : "1059ef51-9e17-4c13-928c-7c1c716d4ee6", - "id" : "hp type", - "capex" : "100.0", - "opex" : "101.0", - "sRated" : "45.0", - "cosphiRated" : "0.975", - "pThermal" : "26.3" + "uuid" : "1059ef51-9e17-4c13-928c-7c1c716d4ee6", + "id" : "hp type", + "capex" : "100.0", + "opex" : 
"101.0", + "sRated" : "45.0", + "cosphiRated": "0.975", + "pThermal" : "26.3" ] when: @@ -591,14 +592,14 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(BmTypeInput) BmTypeInput type = TypeTestData.bmType Map expected = [ - "uuid" : "c3bd30f5-1a62-4a37-86e3-074040d965a4", - "id" : "bm type", - "capex" : "100.0", - "opex" : "101.0", - "activePowerGradient" : "5.0", - "sRated" : "800.0", - "cosphiRated" : "0.965", - "etaConv" : "89.0" + "uuid" : "c3bd30f5-1a62-4a37-86e3-074040d965a4", + "id" : "bm type", + "capex" : "100.0", + "opex" : "101.0", + "activePowerGradient": "5.0", + "sRated" : "800.0", + "cosphiRated" : "0.965", + "etaConv" : "89.0" ] when: @@ -614,19 +615,19 @@ class InputEntityProcessorTest extends Specification { InputEntityProcessor processor = new InputEntityProcessor(StorageTypeInput) StorageTypeInput type = TypeTestData.storageType Map expected = [ - "uuid" : "fbee4995-24dd-45e4-9c85-7d986fe99ff3", - "id" : "storage type", - "capex" : "100.0", - "opex" : "101.0", - "eStorage" : "200.0", - "sRated" : "13.0", - "cosphiRated" : "0.997", - "pMax" : "12.961", - "activePowerGradient" : "3.0", - "eta" : "92.0", - "dod" : "20.0", - "lifeTime" : "43800.0", - "lifeCycle" : "100000" + "uuid" : "fbee4995-24dd-45e4-9c85-7d986fe99ff3", + "id" : "storage type", + "capex" : "100.0", + "opex" : "101.0", + "eStorage" : "200.0", + "sRated" : "13.0", + "cosphiRated" : "0.997", + "pMax" : "12.961", + "activePowerGradient": "3.0", + "eta" : "92.0", + "dod" : "20.0", + "lifeTime" : "43800.0", + "lifeCycle" : "100000" ] when: @@ -636,4 +637,39 @@ class InputEntityProcessorTest extends Specification { actual.present actual.get() == expected } + + def "The InputEntityProcessor should deserialize an entity but ignore the operator field when OperatorInput is equal to NO_OPERATOR_ASSIGNED"() { + given: + InputEntityProcessor processor = new InputEntityProcessor(NodeInput) + def nodeWithOutOperator = new NodeInput( + UUID.fromString("6e0980e0-10f2-4e18-862b-eb2b7c90509b"), "node_d", OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited() + , + Quantities.getQuantity(1d, PU), + false, + null, + GermanVoltageLevelUtils.MV_20KV, + 4) + + Map expected = [ + "geoPosition" : "", + "id" : "node_d", + "operatesFrom" : "", + "operatesUntil": "", + "operator" : "", + "slack" : "false", + "subnet" : "4", + "uuid" : "6e0980e0-10f2-4e18-862b-eb2b7c90509b", + "vRated" : "20.0", + "vTarget" : "1.0", + "voltLvl" : "Mittelspannung" + ] + + when: + Optional> actual = processor.handleEntity(nodeWithOutOperator) + + then: + actual.present + actual.get() == expected + } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/result/ResultEntityProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/result/ResultEntityProcessorTest.groovy index d413d541f..55ab50980 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/result/ResultEntityProcessorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/result/ResultEntityProcessorTest.groovy @@ -54,7 +54,7 @@ class ResultEntityProcessorTest extends Specification { inputModel: '22bea5fc-2cb2-4c61-beb9-b476e0107f52', p : '0.01', q : '0.01', - timestamp : '2020-01-30 17:26:44'] + timestamp : '2020-01-30T17:26:44Z[UTC]'] @Shared def expectedSocResults = [uuid : '22bea5fc-2cb2-4c61-beb9-b476e0107f52', @@ -62,7 +62,7 @@ class ResultEntityProcessorTest extends Specification { p : '0.01', q : '0.01', soc : '50.0', - timestamp : '2020-01-30 17:26:44'] + timestamp : 
'2020-01-30T17:26:44Z[UTC]'] def "A ResultEntityProcessor should de-serialize a provided SystemParticipantResult correctly"() { @@ -80,15 +80,15 @@ class ResultEntityProcessorTest extends Specification { where: modelClass | validSystemParticipantResult || expectedResults - LoadResult | new LoadResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) || expectedStandardResults - FixedFeedInResult | new FixedFeedInResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) || expectedStandardResults - BmResult | new BmResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) || expectedStandardResults - EvResult | new EvResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q, soc) || expectedSocResults - PvResult | new PvResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) || expectedStandardResults - EvcsResult | new EvcsResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) || expectedStandardResults - ChpResult | new ChpResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) || expectedStandardResults - WecResult | new WecResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) || expectedStandardResults - StorageResult | new StorageResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q, soc) || expectedSocResults + LoadResult | new LoadResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q) || expectedStandardResults + FixedFeedInResult | new FixedFeedInResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q) || expectedStandardResults + BmResult | new BmResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q) || expectedStandardResults + EvResult | new EvResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q, soc) || expectedSocResults + PvResult | new PvResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q) || expectedStandardResults + EvcsResult | new EvcsResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q) || expectedStandardResults + ChpResult | new ChpResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q) || expectedStandardResults + WecResult | new WecResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q) || expectedStandardResults + StorageResult | new StorageResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q, soc) || expectedSocResults } @@ -96,7 +96,7 @@ class ResultEntityProcessorTest extends Specification { given: TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") def sysPartResProcessor = new ResultEntityProcessor(StorageResult) - def storageResult = new StorageResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q, null) + def storageResult = new StorageResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q, null) when: @@ -109,7 +109,7 @@ class ResultEntityProcessorTest extends Specification { p : '0.01', q : '0.01', soc : '', - timestamp : '2020-01-30 17:26:44'] + timestamp : '2020-01-30T17:26:44Z[UTC]'] } @@ -117,7 +117,7 @@ class ResultEntityProcessorTest extends Specification { given: TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") def sysPartResProcessor = new ResultEntityProcessor(LoadResult) - def storageResult = new 
StorageResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q, null) + def storageResult = new StorageResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, p, q, null) when: sysPartResProcessor.handleEntity(storageResult) @@ -135,13 +135,13 @@ class ResultEntityProcessorTest extends Specification { Quantity vMag = Quantities.getQuantity(0.95, PowerSystemUnits.PU) Quantity vAng = Quantities.getQuantity(45, StandardUnits.VOLTAGE_ANGLE) - def validResult = new NodeResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, vMag, vAng) + def validResult = new NodeResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, vMag, vAng) def expectedResults = [uuid : '22bea5fc-2cb2-4c61-beb9-b476e0107f52', inputModel: '22bea5fc-2cb2-4c61-beb9-b476e0107f52', vAng : '45.0', vMag : '0.95', - timestamp : '2020-01-30 17:26:44'] + timestamp : '2020-01-30T17:26:44Z[UTC]'] when: def validProcessedElement = sysPartResProcessor.handleEntity(validResult) @@ -159,7 +159,7 @@ class ResultEntityProcessorTest extends Specification { iAAng : '45.0', iBMag : '150.0', iBAng : '30.0', - timestamp : '2020-01-30 17:26:44'] + timestamp : '2020-01-30T17:26:44Z[UTC]'] @Shared def expectedTrafo2WResults = [uuid : '22bea5fc-2cb2-4c61-beb9-b476e0107f52', @@ -169,7 +169,7 @@ class ResultEntityProcessorTest extends Specification { iBMag : '150.0', iBAng : '30.0', tapPos : '5', - timestamp : '2020-01-30 17:26:44'] + timestamp : '2020-01-30T17:26:44Z[UTC]'] @Shared @@ -182,7 +182,7 @@ class ResultEntityProcessorTest extends Specification { iCMag : '300.0', iCAng : '70.0', tapPos : '5', - timestamp : '2020-01-30 17:26:44'] + timestamp : '2020-01-30T17:26:44Z[UTC]'] @Shared def expectedSwitchResults = [uuid : '22bea5fc-2cb2-4c61-beb9-b476e0107f52', @@ -192,7 +192,7 @@ class ResultEntityProcessorTest extends Specification { iBMag : '150.0', iBAng : '30.0', closed : 'true', - timestamp : '2020-01-30 17:26:44'] + timestamp : '2020-01-30T17:26:44Z[UTC]'] @Shared @@ -229,10 +229,10 @@ class ResultEntityProcessorTest extends Specification { where: modelClass | validConnectorResult || expectedResults - LineResult | new LineResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, iAMag, iAAng, iBMag, iBAng) || expectedLineResults - SwitchResult | new SwitchResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, iAMag, iAAng, iBMag, iBAng, closed) || expectedSwitchResults - Transformer2WResult | new Transformer2WResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, iAMag, iAAng, iBMag, iBAng, tapPos) || expectedTrafo2WResults - Transformer3WResult | new Transformer3WResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, iAMag, iAAng, iBMag, iBAng, iCMag, iCAng, tapPos) || expectedTrafo3WResults + LineResult | new LineResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, iAMag, iAAng, iBMag, iBAng) || expectedLineResults + SwitchResult | new SwitchResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, iAMag, iAAng, iBMag, iBAng, closed) || expectedSwitchResults + Transformer2WResult | new Transformer2WResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, iAMag, iAAng, iBMag, iBAng, tapPos) || expectedTrafo2WResults + Transformer3WResult | new Transformer3WResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, iAMag, iAAng, iBMag, iBAng, iCMag, iCAng, tapPos) || expectedTrafo3WResults } def "A 
ResultEntityProcessor should de-serialize a CylindricalStorageResult correctly"() { @@ -244,14 +244,14 @@ class ResultEntityProcessorTest extends Specification { Quantity energy = Quantities.getQuantity(3, StandardUnits.ENERGY_RESULT) Quantity fillLevel = Quantities.getQuantity(20, Units.PERCENT) - def validResult = new CylindricalStorageResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, energy, qDot, fillLevel) + def validResult = new CylindricalStorageResult(uuid, ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), inputModel, energy, qDot, fillLevel) def expectedResults = [uuid : '22bea5fc-2cb2-4c61-beb9-b476e0107f52', energy : '3.0', fillLevel : '20.0', inputModel: '22bea5fc-2cb2-4c61-beb9-b476e0107f52', qDot : '2.0', - timestamp : '2020-01-30 17:26:44'] + timestamp : '2020-01-30T17:26:44Z[UTC]'] when: def validProcessedElement = sysPartResProcessor.handleEntity(validResult) @@ -268,7 +268,7 @@ class ResultEntityProcessorTest extends Specification { TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") def sysPartResProcessor = new ResultEntityProcessor(ResultEntityProcessor.eligibleEntityClasses.get(0)) - def invalidClassResult = new InvalidTestResult(TimeTools.toZonedDateTime("2020-01-30 17:26:44"), uuid) + def invalidClassResult = new InvalidTestResult(ZonedDateTime.parse("2020-01-30T17:26:44Z[UTC]"), uuid) when: sysPartResProcessor.handleEntity(invalidClassResult) diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index 626feced8..a4326c741 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -5,7 +5,6 @@ */ package edu.ie3.datamodel.io.sink -import edu.ie3.datamodel.exceptions.SinkException import edu.ie3.datamodel.io.FileNamingStrategy import edu.ie3.datamodel.io.processor.ProcessorProvider import edu.ie3.datamodel.io.processor.input.InputEntityProcessor @@ -18,6 +17,7 @@ import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.connector.LineInput import edu.ie3.datamodel.models.input.connector.Transformer2WInput import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput +import edu.ie3.datamodel.models.input.connector.type.LineTypeInput import edu.ie3.datamodel.models.input.graphics.LineGraphicInput import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput @@ -32,7 +32,7 @@ import edu.ie3.datamodel.models.value.EnergyPriceValue import edu.ie3.test.common.GridTestData import edu.ie3.test.common.TimeSeriesTestData import edu.ie3.test.common.ThermalUnitInputTestData -import edu.ie3.util.TimeTools +import edu.ie3.util.TimeUtil import edu.ie3.util.io.FileIOUtils import spock.lang.Shared import spock.lang.Specification @@ -56,10 +56,11 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { def "A valid CsvFileSink called by simple constructor should not initialize files by default and consist of several default values"() { given: CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath) - csvFileSink.dataConnector.shutdown() + csvFileSink.shutdown() expect: !new File(testBaseFolderPath).exists() + csvFileSink.csvSep == "," } def "A valid CsvFileSink with 'initFiles' enabled should create files as expected"() { @@ -72,7 +73,7 @@ class CsvFileSinkTest extends Specification implements 
TimeSeriesTestData { new FileNamingStrategy(), true, ",") - csvFileSink.dataConnector.shutdown() + csvFileSink.shutdown() expect: new File(testBaseFolderPath).exists() @@ -96,7 +97,8 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { new InputEntityProcessor(ThermalHouseInput), new InputEntityProcessor(OperatorInput), new InputEntityProcessor(LineInput), - new InputEntityProcessor(ThermalBusInput) + new InputEntityProcessor(ThermalBusInput), + new InputEntityProcessor(LineTypeInput) ], [] as Map), new FileNamingStrategy(), false, @@ -106,8 +108,8 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { UUID inputModel = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") Quantity p = Quantities.getQuantity(10, StandardUnits.ACTIVE_POWER_IN) Quantity q = Quantities.getQuantity(10, StandardUnits.REACTIVE_POWER_IN) - PvResult pvResult = new PvResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) - WecResult wecResult = new WecResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) + PvResult pvResult = new PvResult(uuid, TimeUtil.withDefaults.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) + WecResult wecResult = new WecResult(uuid, TimeUtil.withDefaults.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) when: csvFileSink.persistAll([ @@ -119,7 +121,7 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { ThermalUnitInputTestData.cylindricStorageInput, ThermalUnitInputTestData.thermalHouseInput ]) - csvFileSink.dataConnector.shutdown() + csvFileSink.shutdown() then: new File(testBaseFolderPath).exists() @@ -140,30 +142,6 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { !new File(testBaseFolderPath + File.separator + "ev_res.csv").exists() } - def "A valid CsvFileSink should throw an exception if the provided entity cannot be handled"() { - given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, - new ProcessorProvider([ - new ResultEntityProcessor(PvResult) - ], [] as Map), - new FileNamingStrategy(), - false, - ",") - - UUID uuid = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") - UUID inputModel = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") - Quantity p = Quantities.getQuantity(10, StandardUnits.ACTIVE_POWER_IN) - Quantity q = Quantities.getQuantity(10, StandardUnits.REACTIVE_POWER_IN) - WecResult wecResult = new WecResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) - - when: - csvFileSink.persist(wecResult) - csvFileSink.dataConnector.shutdown() - - then: - thrown(SinkException) - } - def "A valid CsvFileSink should persist a time series correctly"() { given: TimeSeriesProcessor timeSeriesProcessor = new TimeSeriesProcessor<>(IndividualTimeSeries, TimeBasedValue, EnergyPriceValue) @@ -181,7 +159,7 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { when: csvFileSink.persist(individualTimeSeries) - csvFileSink.dataConnector.shutdown() + csvFileSink.shutdown() then: new File(testBaseFolderPath).exists() @@ -194,7 +172,7 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { when: csvFileSink.persistAll(allTimeSeries) - csvFileSink.dataConnector.shutdown() + csvFileSink.shutdown() then: new File(testBaseFolderPath).exists() diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy new file mode 100644 
index 000000000..5fb98162d --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy @@ -0,0 +1,361 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.io.FileNamingStrategy +import edu.ie3.datamodel.models.UniqueEntity +import edu.ie3.datamodel.models.input.NodeInput +import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.test.common.SystemParticipantTestData as sptd +import edu.ie3.test.common.GridTestData as gtd +import spock.lang.Shared +import spock.lang.Specification + +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.atomic.LongAdder +import java.util.stream.Collectors + + +class CsvDataSourceTest extends Specification { + + // Using a groovy bug to gain access to private methods in superclass: + // by default, we cannot access private methods with parameters from abstract parent classes, introducing a + // class that extends the abstract parent class and unveils the private methods by calling the parents private + // methods in a public or protected method makes them available for testing + private final class DummyCsvSource extends CsvDataSource { + + DummyCsvSource(String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { + super(csvSep, folderPath, fileNamingStrategy) + } + + Map buildFieldsToAttributes( + final String csvRow, final String[] headline) { + return super.buildFieldsToAttributes(csvRow, headline) + } + + OperatorInput getFirstOrDefaultOperator( + Collection operators, String operatorUuid) { + return super.getFirstOrDefaultOperator(operators, operatorUuid) + } + + def Set> distinctRowsWithLog( + Class entityClass, Collection> allRows) { + return super.distinctRowsWithLog(entityClass, allRows) + } + + String[] fieldVals( + String csvSep, String csvRow) { + return super.fieldVals(csvSep, csvRow) + } + + } + + @Shared + String csvSep = "," + String testBaseFolderPath = new File(getClass().getResource('/testGridFiles').toURI()).getAbsolutePath() + FileNamingStrategy fileNamingStrategy = new FileNamingStrategy() + + DummyCsvSource dummyCsvSource = new DummyCsvSource(csvSep, testBaseFolderPath, fileNamingStrategy) + + def "A DataSource should contain a valid connector after initialization"() { + expect: + dummyCsvSource.connector != null + dummyCsvSource.connector.baseFolderName == testBaseFolderPath + dummyCsvSource.connector.fileNamingStrategy == fileNamingStrategy + dummyCsvSource.connector.entityWriters.isEmpty() + + } + + def "A CsvDataSource should build a valid fields to attributes map with valid data as expected"() { + given: + def validHeadline = [ + "uuid", + "active_power_gradient", + "capex", + "cosphi_rated", + "eta_conv", + "id", + "opex", + "s_rated", + "olmcharacteristic", + "cosPhiFixed"] as String[] + def validCsvRow = "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput,50.0,25.0,olm:{(0.0,1.0)},cosPhiFixed:{(0.0,1.0)}" + + expect: + dummyCsvSource.buildFieldsToAttributes(validCsvRow, validHeadline) == [ + activePowerGradient: "25.0", + capex : "100.0", + cosphiRated : "0.95", + etaConv : "98.0", + id : "test_bmTypeInput", + opex : "50.0", + sRated : "25.0", + uuid : "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8", + olmcharacteristic : "olm:{(0.0,1.0)}", + cosPhiFixed : "cosPhiFixed:{(0.0,1.0)}" + ] + + } + + def "A CsvDataSource should be able to handle a 
variety of different csvRows correctly"() { + expect: + dummyCsvSource.fieldVals(csvSep, csvRow) as List == resultingArray + + where: + csvSep | csvRow || resultingArray + "," | "4ca90220-74c2-4369-9afa-a18bf068840d,{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}},node_a,2020-03-25T15:11:31Z[UTC],2020-03-24T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,true,1,1.0,Höchstspannung,380.0,olm:{(0.00,1.00)},cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" || [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC]", + "2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + ] + "," | "\"4ca90220-74c2-4369-9afa-a18bf068840d\",\"{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}\",\"node_a\",\"2020-03-25T15:11:31Z[UTC]\",\"2020-03-24T15:11:31Z[UTC]\",\"8f9682df-0744-4b58-a122-f0dc730f6510\",\"true\",\"1\",\"1.0\",\"Höchstspannung\",\"380.0\",\"olm:{(0.00,1.00)}\",\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\"" || [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC]", + "2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + ] + ";" | "4ca90220-74c2-4369-9afa-a18bf068840d;cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)};{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}};node_a;2020-03-25T15:11:31Z[UTC];2020-03-24T15:11:31Z[UTC];8f9682df-0744-4b58-a122-f0dc730f6510;true;1;1.0;Höchstspannung;380.0;olm:{(0.00,1.00)};cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" || [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}", + "{(0.0,1.0),(0.9,1.0),(1.2,-0.3)};{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC]", + "2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + ] + ";" | "\"4ca90220-74c2-4369-9afa-a18bf068840d\";\"{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}\";\"node_a\";\"2020-03-25T15:11:31Z[UTC]\";\"2020-03-24T15:11:31Z[UTC]\";\"8f9682df-0744-4b58-a122-f0dc730f6510\";\"true\";\"1\";\"1.0\";\"Höchstspannung\";\"380.0\";\"olm:{(0.00,1.00)}\";\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\"" || [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC]", + "2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + ] + } + + + def "A 
CsvDataSource should build a valid fields to attributes map with valid data and empty value fields as expected"() { + given: + def validHeadline = [ + "uuid", + "active_power_gradient", + "capex", + "cosphi_rated", + "eta_conv", + "id", + "opex", + "s_rated", + "olmcharacteristic", + "cosPhiFixed"] as String[] + def validCsvRow = "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput,50.0,25.0,olm:{(0.0,1.0)}," + + expect: + dummyCsvSource.buildFieldsToAttributes(validCsvRow, validHeadline) == [ + activePowerGradient: "25.0", + capex : "100.0", + cosphiRated : "0.95", + etaConv : "98.0", + id : "test_bmTypeInput", + opex : "50.0", + sRated : "25.0", + uuid : "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8", + olmcharacteristic : "olm:{(0.0,1.0)}", + cosPhiFixed : "" + ] + + } + + def "A CsvDataSource should be able to handle several errors when the csvRow is invalid or cannot be processed"() { + given: + def validHeadline = [ + "uuid", + "active_power_gradient", + "capex", + "cosphi_rated", + "eta_conv", + "id", + "opex", + "s_rated"] as String[] + + expect: + dummyCsvSource.buildFieldsToAttributes(invalidCsvRow, validHeadline) == [:] + + where: + invalidCsvRow || explanation + "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8;25.0;100.0;0.95;98.0;test_bmTypeInput;50.0;25.0" || "wrong separator" + "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput" || "too few columns" + "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput,,,," || "too many columns" + + } + + def "A CsvDataSource should always return an operator. Either the found one (if any) or OperatorInput.NO_OPERATOR_ASSIGNED"() { + + expect: + dummyCsvSource.getFirstOrDefaultOperator(operators, operatorUuid) == expectedOperator + + where: + operatorUuid | operators || expectedOperator + "8f9682df-0744-4b58-a122-f0dc730f6510" | [sptd.hpInput.operator]|| sptd.hpInput.operator + "8f9682df-0744-4b58-a122-f0dc730f6520" | [sptd.hpInput.operator]|| OperatorInput.NO_OPERATOR_ASSIGNED + "8f9682df-0744-4b58-a122-f0dc730f6510" | []|| OperatorInput.NO_OPERATOR_ASSIGNED + + } + + def "A CsvDataSource should be able to collect empty optionals when asked to do so"() { + + given: + ConcurrentHashMap, LongAdder> emptyCollector = new ConcurrentHashMap<>() + def nodeInputOptionals = [ + Optional.of(sptd.hpInput.node), + Optional.empty(), + Optional.of(sptd.chpInput.node) + ] + + when: + def resultingList = nodeInputOptionals.stream().filter(dummyCsvSource.isPresentCollectIfNot(NodeInput, emptyCollector)).collect(Collectors.toList()) + + then: + emptyCollector.size() == 1 + emptyCollector.get(NodeInput).toInteger() == 1 + + resultingList.size() == 2 + resultingList.get(0) == Optional.of(sptd.hpInput.node) + resultingList.get(1) == Optional.of(sptd.chpInput.node) + } + + def "A CsvDataSource should return a given collection of csv row mappings as a collection of distinct rows correctly"() { + + given: + def nodeInputRow = [ + "uuid" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "geo_position" : "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "id" : "node_a", + "operates_until": "2020-03-25T15:11:31Z[UTC]", + "operates_from" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "slack" : "true", + "subnet" : "1", + "v_target" : "1.0", + "volt_lvl" : "Höchstspannung", + "v_rated" : "380" + ] + + when: + def allRows = [nodeInputRow]* noOfEntities + def distinctRows = 
dummyCsvSource.distinctRowsWithLog(NodeInput, allRows) + + then: + distinctRows.size() == distinctSize + distinctRows[0] == firstElement + + where: + noOfEntities || distinctSize || firstElement + 0 || 0 || null + 10 || 1 || ["uuid" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "geo_position" : "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "id" : "node_a", + "operates_until": "2020-03-25T15:11:31Z[UTC]", + "operates_from" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "slack" : "true", + "subnet" : "1", + "v_target" : "1.0", + "volt_lvl" : "Höchstspannung", + "v_rated" : "380"] + + } + + def "A CsvDataSource should return an empty set of csv row mappings if the provided collection of mappings contains duplicated UUIDs with different data"() { + + given: + def nodeInputRow1 = [ + "uuid" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "geo_position" : "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "id" : "node_a", + "operates_until": "2020-03-25T15:11:31Z[UTC]", + "operates_from" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "slack" : "true", + "subnet" : "1", + "v_target" : "1.0", + "volt_lvl" : "Höchstspannung", + "v_rated" : "380" + ] + def nodeInputRow2 = [ + "uuid" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "geo_position" : "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "id" : "node_b", + "operates_until": "2020-03-25T15:11:31Z[UTC]", + "operates_from" : "2020-03-24T15:11:31Z[UTC]", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "slack" : "true", + "subnet" : "1", + "v_target" : "1.0", + "volt_lvl" : "Höchstspannung", + "v_rated" : "380" + ] + + when: + def allRows = [nodeInputRow1, nodeInputRow2]* 10 + def distinctRows = dummyCsvSource.distinctRowsWithLog(NodeInput, allRows) + + then: + distinctRows.size() == 0 + } + + def "A CsvDataSource should be able to handle the extraction process of an asset type correctly"() { + + when: + def assetTypeOpt = dummyCsvSource.getAssetType(types, fieldsToAttributes, "TestClassName") + + then: + assetTypeOpt.present == resultIsPresent + assetTypeOpt.ifPresent({ assetType -> + assert (assetType == resultData) + }) + + where: + types | fieldsToAttributes || resultIsPresent || resultData + []| ["type": "202069a7-bcf8-422c-837c-273575220c8a"] || false || null + []| ["bla": "foo"] || false || null + [gtd.transformerTypeBtoD]| ["type": "202069a7-bcf8-422c-837c-273575220c8a"] || true || gtd.transformerTypeBtoD + [sptd.chpTypeInput]| ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8"] || true || sptd.chpTypeInput + } + +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy new file mode 100644 index 000000000..73164c1a5 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy @@ -0,0 +1,169 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputEntityData +import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputEntityData +import edu.ie3.datamodel.io.source.RawGridSource +import edu.ie3.datamodel.models.input.NodeInput +import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput +import edu.ie3.test.common.GridTestData as gtd +import org.locationtech.jts.geom.LineString +import org.locationtech.jts.geom.Point +import spock.lang.Specification + +class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { + + + def "A CsvGraphicSource should provide an instance of GraphicElements based on valid input data correctly"() { + given: + def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) + def rawGridSource = new CsvRawGridSource(csvSep, gridFolderPath, fileNamingStrategy, typeSource) + def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, typeSource, rawGridSource) + + when: + def graphicElementsOpt = csvGraphicSource.getGraphicElements() + + then: + graphicElementsOpt.present + graphicElementsOpt.ifPresent({ + assert (it.allEntitiesAsList().size() == 3) + assert (it.nodeGraphics.size() == 2) + assert (it.lineGraphics.size() == 1) + }) + } + + def "A CsvGraphicSource should process invalid input data as expected when requested to provide an instance of GraphicElements"() { + given: + def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) + def rawGridSource = Spy(CsvRawGridSource, constructorArgs: [ + csvSep, + gridFolderPath, + fileNamingStrategy, + typeSource + ]) { + // partly fake the return method of the csv raw grid source to always return empty node sets + // -> elements to build NodeGraphicInputs are missing + getNodes() >> new HashSet() + getNodes(_) >> new HashSet() + } as RawGridSource + + def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, typeSource, rawGridSource) + + when: + def graphicElementsOpt = csvGraphicSource.getGraphicElements() + + then: + !graphicElementsOpt.present + } + + + def "A CsvGraphicSource should read and handle a valid node graphics file as expected"() { + given: + def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvRawGridSource)) + def expectedNodeGraphicD = new NodeGraphicInput( + gtd.nodeGraphicD.uuid, + gtd.nodeGraphicD.graphicLayer, + gtd.nodeGraphicD.path, + gtd.nodeD, + gtd.geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [7.4116482, 51.4843281] }") as Point + ) + def expectedNodeGraphicC = new NodeGraphicInput( + gtd.nodeGraphicC.uuid, + gtd.nodeGraphicC.graphicLayer, + gtd.geoJsonReader.read("{ \"type\": \"LineString\", \"coordinates\": [[7.4116482, 51.4843281], [7.4116482, 51.4843281]]}") as LineString, + gtd.nodeC, + gtd.nodeGraphicC.point + ) + + when: + def nodeGraphics = csvGraphicSource.getNodeGraphicInput([gtd.nodeC, gtd.nodeD] as Set) + + then: + nodeGraphics.size() == 2 + nodeGraphics == [ + expectedNodeGraphicC, + expectedNodeGraphicD] as Set + } + + def "A CsvGraphicSource should read and handle a valid line graphics file as expected"() { + given: + def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), 
Mock(CsvRawGridSource)) + + when: + def lineGraphics = csvGraphicSource.getLineGraphicInput([gtd.lineCtoD] as Set) + + then: + lineGraphics.size() == 1 + lineGraphics.first() == gtd.lineGraphicCtoD + } + + def "A CsvGraphicSource should build node graphic entity data from valid and invalid input data correctly"() { + given: + def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvRawGridSource)) + def fieldsToAttributesMap = [ + "uuid" : "09aec636-791b-45aa-b981-b14edf171c4c", + "graphic_layer": "main", + "node" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", + "path" : "", + "point" : "{\"type\":\"Point\",\"coordinates\":[0.0,10],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}" + ] + + expect: + def res = csvGraphicSource.buildNodeGraphicEntityData(fieldsToAttributesMap, nodeCollection as Set) + res.present == isPresent + + res.ifPresent({ value -> + assert value == new NodeGraphicInputEntityData([ + "uuid" : "09aec636-791b-45aa-b981-b14edf171c4c", + "graphic_layer": "main", + "path" : "", + "point" : "{\"type\":\"Point\",\"coordinates\":[0.0,10],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}" + ], gtd.nodeC) + assert value.node == gtd.nodeC + }) + + + where: + nodeCollection || isPresent + []|| false // no nodes provided + [gtd.nodeA, gtd.nodeB]|| false // node cannot be found + [gtd.nodeC]|| true // node found + + } + + def "A CsvGraphicSource should build line graphic entity data from valid and invalid input data correctly"() { + given: + def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvRawGridSource)) + def fieldsToAttributesMap = [ + "uuid" : "ece86139-3238-4a35-9361-457ecb4258b0", + "graphic_layer": "main", + "line" : "92ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "path" : "{\"type\":\"LineString\",\"coordinates\":[[0.0,0.0],[0.0,10]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}" + ] + + expect: + def res = csvGraphicSource.buildLineGraphicEntityData(fieldsToAttributesMap, nodeCollection as Set) + res.present == isPresent + + res.ifPresent({ value -> + assert value == new LineGraphicInputEntityData(["uuid" : "ece86139-3238-4a35-9361-457ecb4258b0", + "graphic_layer": "main", + "path" : "{\"type\":\"LineString\",\"coordinates\":[[0.0,0.0],[0.0,10]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}" + ] + , gtd.lineAtoB) + assert value.line == gtd.lineAtoB + }) + + + where: + nodeCollection || isPresent + []|| false // no lines provided + [gtd.lineCtoD]|| false // line cannot be found + [gtd.lineAtoB]|| true // line found + + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvRawGridSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvRawGridSourceTest.groovy new file mode 100644 index 000000000..f30183169 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvRawGridSourceTest.groovy @@ -0,0 +1,766 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.io.factory.input.AssetInputEntityData +import edu.ie3.datamodel.io.factory.input.ConnectorInputEntityData +import edu.ie3.datamodel.io.factory.input.Transformer3WInputEntityData +import edu.ie3.datamodel.io.factory.input.TypedConnectorInputEntityData +import edu.ie3.datamodel.models.input.connector.LineInput +import edu.ie3.datamodel.models.input.connector.SwitchInput +import edu.ie3.datamodel.models.input.connector.Transformer3WInput +import edu.ie3.datamodel.models.input.container.RawGridElements +import edu.ie3.test.common.GridTestData +import edu.ie3.test.common.GridTestData as rgtd + +import spock.lang.Shared +import spock.lang.Specification + +import java.util.stream.Collectors +import java.util.stream.Stream + +class CsvRawGridSourceTest extends Specification implements CsvTestDataMeta { + @Shared + CsvRawGridSource source + + def setupSpec() { + CsvTypeSource typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) + source = new CsvRawGridSource(csvSep, gridFolderPath, fileNamingStrategy, typeSource) + } + + def "The CsvRawGridSource is able to convert single valid AssetInputEntityData to ConnectorInputEntityData"() { + given: "valid input data" + def fieldsToAttributes = [ + "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", + "id" : "test_switch_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeA" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", + "closed" : "true" + ] + + def expectedFieldsToAttributes = [ + "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", + "id" : "test_switch_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "closed" : "true" + ] + + def validAssetEntityInputData = new AssetInputEntityData(fieldsToAttributes, SwitchInput) + + def nodes = [rgtd.nodeA, rgtd.nodeB] + + when: "the source tries to convert it" + def connectorDataOption = source.buildUntypedConnectorInputEntityData(validAssetEntityInputData, nodes) + + then: "everything is fine" + connectorDataOption.isPresent() + connectorDataOption.get().with { + assert fieldsToValues == expectedFieldsToAttributes + assert entityClass == SwitchInput + assert nodeA == rgtd.nodeA + assert nodeB == rgtd.nodeB + } + } + + def "The CsvRawGridSource is NOT able to convert single invalid AssetInputEntityData to ConnectorInputEntityData"() { + given: "invalid input data" + def fieldsToAttributes = [ + "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", + "id" : "test_switch_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeA" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "nodeB" : "620d35fc-34f8-48af-8020-3897fe75add7", + "closed" : "true" + ] + + def validAssetEntityInputData = new AssetInputEntityData(fieldsToAttributes, SwitchInput) + + def nodes = [rgtd.nodeA, rgtd.nodeB] + + when: "the source tries to convert it" + def connectorDataOption = source.buildUntypedConnectorInputEntityData(validAssetEntityInputData, nodes) + + then: "it returns an empty Optional" + !connectorDataOption.isPresent() + } + + + def "The CsvRawGridSource is able 
to convert a stream of valid AssetInputEntityData to ConnectorInputEntityData"() { + given: "valid input data" + def validStream = Stream.of( + new AssetInputEntityData([ + "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", + "id" : "test_switch_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeA" : "4ca90220-74c2-4369-9afa-a18bf068840d", + "nodeB" : "47d29df0-ba2d-4d23-8e75-c82229c5c758", + "closed" : "true" + ], SwitchInput), + new AssetInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeA" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", + "nodeB" : "6e0980e0-10f2-4e18-862b-eb2b7c90509b", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput) + ) + + def expectedSet = [ + Optional.of(new ConnectorInputEntityData([ + "uuid" : "5dc88077-aeb6-4711-9142-db57287640b1", + "id" : "test_switch_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "closed" : "true" + ], + SwitchInput, + rgtd.nodeA, + rgtd.nodeB + )), + Optional.of(new ConnectorInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD + )) + ] as Set + + def nodes = [ + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC, + rgtd.nodeD + ] + + when: "the source tries to convert it" + def actualSet = source.buildUntypedConnectorInputEntityData(validStream, nodes).collect(Collectors.toSet()) + + then: "everything is fine" + actualSet.size() == expectedSet.size() + actualSet.containsAll(expectedSet) + } + + def "The CsvRawGridSource is able to add a type to untyped ConnectorInputEntityData correctly"() { + given: "valid input data" + def validConnectorEntityData = new ConnectorInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD + ) + + def expectedTypedEntityData = new TypedConnectorInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", 
\"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD, + rgtd.lineTypeInputCtoD + ) + + when: "the source tries to convert it" + def actual = source.addTypeToEntityData(validConnectorEntityData, rgtd.lineTypeInputCtoD) + + then: "everything is fine" + actual == expectedTypedEntityData + } + + def "The CsvRawGridSource is able to find and add a type to untyped ConnectorInputEntityData correctly"() { + given: "valid input data" + def validConnectorEntityData = new ConnectorInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD + ) + + def expectedTypedEntityData = Optional.of(new TypedConnectorInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD, + rgtd.lineTypeInputCtoD + )) + + def availableTypes = [rgtd.lineTypeInputCtoD] + + when: "the source tries to convert it" + def actual = source.findAndAddType(validConnectorEntityData, availableTypes) + + then: "everything is fine" + actual == expectedTypedEntityData + } + + def "The CsvRawGridSource is able to identify ConnectorInputEntityData data with non matching type requirements correctly"() { + given: "valid input data" + def validConnectorEntityData = new ConnectorInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "type" : "fd5b128d-ed35-4355-94b6-7518c55425fe", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD + ) + + def availableTypes = [rgtd.lineTypeInputCtoD] + + when: "the source tries to convert it" + def actual = source.findAndAddType(validConnectorEntityData, availableTypes) + + then: "everything is fine" + !actual.isPresent() + } + + def "The CsvRawGridSource is able to convert a stream of valid ConnectorInputEntityData to TypedConnectorInputEntityData"() { + given: "valid input data" + def validStream = Stream.of( + Optional.of(new ConnectorInputEntityData([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], 
[7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD + )), + Optional.of(new ConnectorInputEntityData([ + "uuid" : "92ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_line_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "type" : "3bed3eb3-9790-4874-89b5-a5434d408088", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], LineInput, + rgtd.nodeA, + rgtd.nodeB + )) + ) + + def expectedSet = [ + Optional.of(new TypedConnectorInputEntityData<>([ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_lineCtoD", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], + LineInput, + rgtd.nodeC, + rgtd.nodeD, + rgtd.lineTypeInputCtoD + )), + Optional.of(new TypedConnectorInputEntityData<>([ + "uuid" : "92ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "id" : "test_line_AtoB", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "2", + "length" : "0.003", + "geoPosition" : "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}", + "olmCharacteristic" : "olm:{(0.0,1.0)}" + ], LineInput, + rgtd.nodeA, + rgtd.nodeB, + rgtd.lineTypeInputCtoD + )) + ] + + def availableTypes = [rgtd.lineTypeInputCtoD] + + when: "the source tries to convert it" + def actualSet = source.buildTypedConnectorEntityData(validStream, availableTypes).collect(Collectors.toSet()) + + then: "everything is fine" + actualSet.size() == expectedSet.size() + actualSet.containsAll(expectedSet) + } + + def "The CsvRawGridSource is able to add the third node for a three winding transformer correctly"() { + given: "valid input data" + def typedEntityData = new TypedConnectorInputEntityData([ + "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", + "id" : "3w_test", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeC" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", + "parallelDevices" : "1", + "tapPos" : "0", + "autoTap" : "true" + ], + Transformer3WInput, + rgtd.nodeA, + rgtd.nodeB, + rgtd.transformerTypeAtoBtoC) + + def expected = Optional.of(new Transformer3WInputEntityData([ + "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", + "id" : "3w_test", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "1", + "tapPos" : "0", + "autoTap" : "true" + ], + Transformer3WInput, + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC, + rgtd.transformerTypeAtoBtoC)) + + def availableNodes = [ + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC + ] + + when: "the sources tries to add the node" + def actual = source.addThirdNode(typedEntityData, availableNodes) + + then: "everything is fine" + actual == expected + } + + def "The CsvRawGridSource is NOT able to add the third node for a three winding transformer, if it is not available"() 
{ + given: "valid input data" + def typedEntityData = new TypedConnectorInputEntityData([ + "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", + "id" : "3w_test", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeC" : "bd8927b4-0ca9-4dd3-b645-468e6e433160", + "parallelDevices" : "1", + "tapPos" : "0", + "autoTap" : "true" + ], + Transformer3WInput, + rgtd.nodeA, + rgtd.nodeB, + rgtd.transformerTypeAtoBtoC) + + def availableNodes = [ + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC + ] + + when: "the sources tries to add the node" + def actual = source.addThirdNode(typedEntityData, availableNodes) + + then: "everything is fine" + !actual.isPresent() + } + + def "The CsvRawGridSource is able to add the third node for a three winding transformer to a stream of candidates"() { + given: "suitable input data" + def inputStream = Stream.of(Optional.of(new TypedConnectorInputEntityData([ + "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", + "id" : "3w_test", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeC" : "bd837a25-58f3-44ac-aa90-c6b6e3cd91b2", + "parallelDevices" : "1", + "tapPos" : "0", + "autoTap" : "true" + ], + Transformer3WInput, + rgtd.nodeA, + rgtd.nodeB, + rgtd.transformerTypeAtoBtoC)), + Optional.of(new TypedConnectorInputEntityData([ + "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", + "id" : "3w_test", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "nodeC" : "bd8927b4-0ca9-4dd3-b645-468e6e433160", + "parallelDevices" : "1", + "tapPos" : "0", + "autoTap" : "true" + ], + Transformer3WInput, + rgtd.nodeA, + rgtd.nodeB, + rgtd.transformerTypeAtoBtoC)) + ) + + def availableNodes = [ + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC + ] + + def expectedSet = [ + Optional.of(new Transformer3WInputEntityData([ + "uuid" : "cc327469-7d56-472b-a0df-edbb64f90e8f", + "id" : "3w_test", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-24 15:11:31", + "parallelDevices" : "1", + "tapPos" : "0", + "autoTap" : "true" + ], + Transformer3WInput, + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC, + rgtd.transformerTypeAtoBtoC)), + Optional.empty() + ] + + when: "the sources tries to add nodes" + def actualSet = source.buildTransformer3WEntityData(inputStream, availableNodes).collect(Collectors.toSet()) + + then: "everything is fine" + actualSet.size() == expectedSet.size() + actualSet.containsAll(expectedSet) + } + + def "The CsvRawGridSource is able to load all nodes from file"() { + when: "loading all nodes from file" + def actualSet = source.getNodes() + def expectedSet = [ + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC, + rgtd.nodeD, + rgtd.nodeE, + rgtd.nodeF, + rgtd.nodeG + ] + + then: "all nodes are there" + actualSet.size() == expectedSet.size() + + actualSet.each {actual -> + def expected = expectedSet.find {it.uuid == actual.uuid} + assert expected != null + + actual.with { + assert uuid == expected.uuid + assert id == expected.id + assert operator == expected.operator + assert operationTime == expected.operationTime + assert vTarget == expected.vTarget + assert slack == expected.slack + assert geoPosition.coordinates == expected.geoPosition.coordinates + assert voltLvl == expected.voltLvl + assert subnet == expected.subnet + } + } + } + + def "The CsvRawGridSource 
is able to load all measurement units from file"() { + when: "loading all measurement units from file" + def actualSet = source.getMeasurementUnits() + def expectedSet = [ + rgtd.measurementUnitInput + ] + + then: "all measurement units are there" + actualSet.size() == expectedSet.size() + actualSet.each {actual -> + def expected = expectedSet.find {it.uuid == actual.uuid} + assert expected != null + + actual.with { + assert uuid == expected.uuid + assert id == expected.id + assert operator == expected.operator + assert operationTime == expected.operationTime + assert node.uuid == expected.node.uuid + assert vMag == expected.vMag + assert vAng == expected.vAng + assert p == expected.p + assert q == expected.q + } + } + } + + def "The CsvRawGridSource is able to load all switches from file"() { + when: "loading all switches from file" + def actualSet = source.getSwitches() + def expectedSet = [rgtd.switchAtoB] + + then: "all switches are there" + actualSet.size() == expectedSet.size() + actualSet.each {actual -> + def expected = expectedSet.find {it.uuid == actual.uuid} + assert expected != null + + actual.with { + assert uuid == expected.uuid + assert id == expected.id + assert operator == expected.operator + assert operationTime == expected.operationTime + assert nodeA.uuid == expected.nodeA.uuid + assert nodeB.uuid == expected.nodeB.uuid + assert closed == expected.closed + } + } + } + + def "The CsvRawGridSource is able to load all lines from file"() { + when: "loading all lines from file" + def actualSet = source.getLines() + def expectedSet = [ + rgtd.lineAtoB, + rgtd.lineCtoD + ] + + then: "all lines are there" + actualSet.size() == expectedSet.size() + actualSet.each {actual -> + def expected = expectedSet.find {it.uuid == actual.uuid} + assert expected != null + + actual.with { + assert uuid == expected.uuid + assert id == expected.id + assert operator == expected.operator + assert operationTime == expected.operationTime + assert nodeA.uuid == expected.nodeA.uuid + assert nodeB.uuid == expected.nodeB.uuid + assert parallelDevices == expected.parallelDevices + assert type == expected.type + assert length == expected.length + assert geoPosition.coordinates == expected.geoPosition.coordinates + assert olmCharacteristic == expected.olmCharacteristic + } + } + } + + def "The CsvRawGridSource is able to load all two winding transformers from file"() { + when: "loading all two winding transformers from file" + def actualSet = source.get2WTransformers() + def expectedSet = [ + GridTestData.transformerBtoD, + GridTestData.transformerBtoE, + GridTestData.transformerCtoE, + GridTestData.transformerCtoF, + GridTestData.transformerCtoG + ] + + then: "all two winding transformers are there" + actualSet.size() == expectedSet.size() + actualSet.each {actual -> + def expected = expectedSet.find {it.uuid == actual.uuid} + assert expected != null + + actual.with { + assert uuid == expected.uuid + assert id == expected.id + assert operator == expected.operator + assert operationTime == expected.operationTime + assert nodeA.uuid == expected.nodeA.uuid + assert nodeB.uuid == expected.nodeB.uuid + assert parallelDevices == expected.parallelDevices + assert type == expected.type + assert tapPos == expected.tapPos + assert autoTap == expected.autoTap + } + } + } + + def "The CsvRawGridSource is able to load all three winding transformers from file"() { + when: "loading all three winding transformers from file" + def actualSet = source.get3WTransformers() + def expectedSet = [ + 
GridTestData.transformerAtoBtoC + ] + + then: "all three winding transformers are there" + actualSet.size() == expectedSet.size() + actualSet.each {actual -> + def expected = expectedSet.find {it.uuid == actual.uuid} + assert expected != null + + actual.with { + assert uuid == expected.uuid + assert id == expected.id + assert operator == expected.operator + assert operationTime == expected.operationTime + assert nodeA.uuid == expected.nodeA.uuid + assert nodeB.uuid == expected.nodeB.uuid + assert nodeC.uuid == expected.nodeC.uuid + assert parallelDevices == expected.parallelDevices + assert type == expected.type + assert tapPos == expected.tapPos + assert autoTap == expected.autoTap + } + } + } + + def "The CsvRawGridSource is able to provide a correct RawGridElements"() { + when: "loading a total grid structure from file" + def actual = source.getGridData() + def expected = new RawGridElements( + [ + rgtd.nodeA, + rgtd.nodeB, + rgtd.nodeC, + rgtd.nodeD, + rgtd.nodeE, + rgtd.nodeF, + rgtd.nodeG + ] as Set, + [ + rgtd.lineAtoB, + rgtd.lineCtoD + ] as Set, + [ + GridTestData.transformerBtoD, + GridTestData.transformerBtoE, + GridTestData.transformerCtoE, + GridTestData.transformerCtoF, + GridTestData.transformerCtoG + ] as Set, + [ + GridTestData.transformerAtoBtoC + ] as Set, + [rgtd.switchAtoB + ] as Set, + [ + rgtd.measurementUnitInput + ] as Set + ) + + then: "all elements are there" + actual.isPresent() + actual.get().with { + /* It's okay to only test the uuids here, because the content is tested by the other test methods */ + assert nodes.size() == expected.nodes.size() + assert nodes.each {entry -> expected.nodes.contains({it.uuid == entry.uuid})} + assert lines.size() == expected.lines.size() + assert lines.each {entry -> expected.lines.contains({it.uuid == entry.uuid})} + assert transformer2Ws.size() == expected.transformer2Ws.size() + assert transformer2Ws.each {entry -> expected.transformer2Ws.contains({it.uuid == entry.uuid})} + assert transformer3Ws.size() == expected.transformer3Ws.size() + assert transformer3Ws.each {entry -> expected.transformer3Ws.contains({it.uuid == entry.uuid})} + assert switches.size() == expected.switches.size() + assert switches.each {entry -> expected.switches.contains({it.uuid == entry.uuid})} + assert measurementUnits.size() == expected.measurementUnits.size() + assert measurementUnits.each {entry -> expected.measurementUnits.contains({it.uuid == entry.uuid})} + } + } + + def "The CsvRawGridSource returns an empty Optional, if one mandatory element for the RawGridElements is missing"() { + given: "a source pointing to malformed grid data" + CsvTypeSource typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) + source = new CsvRawGridSource(csvSep, gridFolderPath+"_malformed", fileNamingStrategy, typeSource) + + when: "loading a total grid structure from file" + def actual = source.getGridData() + + then: "the optional is empty" + !actual.isPresent() + } + + def "The CsvRawGridSource returns an empty Optional, if the RawGridElements does not contain a single element"() { + given: "a source pointing to empty grid data" + CsvTypeSource typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) + source = new CsvRawGridSource(csvSep, gridFolderPath+"_empty", fileNamingStrategy, typeSource) + + when: "loading a total grid structure from file" + def actual = source.getGridData() + + then: "the optional is empty" + !actual.isPresent() + } +} \ No newline at end of file diff --git 
a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSourceTest.groovy new file mode 100644 index 000000000..0ac57ec55 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvSystemParticipantSourceTest.groovy @@ -0,0 +1,371 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.io.factory.input.NodeAssetInputEntityData +import edu.ie3.datamodel.io.factory.input.participant.ChpInputEntityData +import edu.ie3.datamodel.io.factory.input.participant.HpInputEntityData +import edu.ie3.datamodel.io.factory.input.participant.SystemParticipantTypedEntityData +import edu.ie3.datamodel.io.source.RawGridSource +import edu.ie3.datamodel.models.input.NodeInput +import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.models.input.system.BmInput +import edu.ie3.datamodel.models.input.system.ChpInput +import edu.ie3.datamodel.models.input.system.EvInput +import edu.ie3.datamodel.models.input.system.FixedFeedInInput +import edu.ie3.datamodel.models.input.system.HpInput +import edu.ie3.datamodel.models.input.system.LoadInput +import edu.ie3.datamodel.models.input.system.PvInput +import edu.ie3.datamodel.models.input.system.StorageInput +import edu.ie3.datamodel.models.input.system.WecInput +import edu.ie3.datamodel.models.input.thermal.ThermalBusInput +import edu.ie3.datamodel.models.input.thermal.ThermalStorageInput +import edu.ie3.test.common.SystemParticipantTestData as sptd +import org.apache.commons.lang3.NotImplementedException +import spock.lang.Specification + +class CsvSystemParticipantSourceTest extends Specification implements CsvTestDataMeta { + + def "A CsvSystemParticipantSource should provide an instance of SystemParticipants based on valid input data correctly"() { + given: + def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) + def thermalSource = new CsvThermalSource(csvSep, participantsFolderPath, fileNamingStrategy, typeSource) + def rawGridSource = new CsvRawGridSource(csvSep, gridFolderPath, fileNamingStrategy, typeSource) + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, + participantsFolderPath, fileNamingStrategy, typeSource, + thermalSource, rawGridSource) + + when: + def systemParticipantsOpt = csvSystemParticipantSource.getSystemParticipants() + + then: + systemParticipantsOpt.present + systemParticipantsOpt.ifPresent({ systemParticipants -> + assert (systemParticipants.allEntitiesAsList().size() == 9) + assert (systemParticipants.getPvPlants().first().uuid == sptd.pvInput.uuid) + assert (systemParticipants.getBmPlants().first().uuid == sptd.bmInput.uuid) + assert (systemParticipants.getChpPlants().first().uuid == sptd.chpInput.uuid) + assert (systemParticipants.getEvs().first().uuid == sptd.evInput.uuid) + assert (systemParticipants.getFixedFeedIns().first().uuid == sptd.fixedFeedInInput.uuid) + assert (systemParticipants.getHeatPumps().first().uuid == sptd.hpInput.uuid) + assert (systemParticipants.getLoads().first().uuid == sptd.loadInput.uuid) + assert (systemParticipants.getWecPlants().first().uuid == sptd.wecInput.uuid) + assert (systemParticipants.getStorages().first().uuid == sptd.storageInput.uuid) + assert (systemParticipants.getEvCS() == [] as Set) + }) + } + + def "A 
CsvSystemParticipantSource should process invalid input data as expected when requested to provide an instance of SystemParticipants"() { + given: + def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) + def thermalSource = new CsvThermalSource(csvSep, participantsFolderPath, fileNamingStrategy, typeSource) + def rawGridSource = Spy(CsvRawGridSource, constructorArgs: [ + csvSep, + gridFolderPath, + fileNamingStrategy, + typeSource + ]) { + // partly fake the return method of the csv raw grid source to always return empty node sets + // -> elements to build NodeGraphicInputs are missing + getNodes() >> new HashSet() + getNodes(_) >> new HashSet() + } as RawGridSource + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, + participantsFolderPath, fileNamingStrategy, typeSource, + thermalSource, rawGridSource) + + when: + def systemParticipantsOpt = csvSystemParticipantSource.getSystemParticipants() + + then: + !systemParticipantsOpt.present + } + + def "A CsvSystemParticipantSource should build typed entity from valid and invalid input data as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, + participantsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), + Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + def nodeAssetInputEntityData = new NodeAssetInputEntityData(fieldsToAttributes, clazz, operator, node) + + when: + def typedEntityDataOpt = csvSystemParticipantSource.buildTypedEntityData(nodeAssetInputEntityData, types) + + then: + typedEntityDataOpt.present == resultIsPresent + typedEntityDataOpt.ifPresent({ typedEntityData -> + assert (typedEntityData == resultData) + }) + + where: + types | node | operator | fieldsToAttributes | clazz || resultIsPresent || resultData + []| sptd.chpInput.node | sptd.chpInput.operator | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8"] | ChpInput || false || null + [sptd.chpTypeInput]| sptd.chpInput.node | sptd.chpInput.operator | ["bla": "foo"] | ChpInput || false || null + [sptd.chpTypeInput]| sptd.chpInput.node | sptd.chpInput.operator | [:] | ChpInput || false || null + [sptd.chpTypeInput]| sptd.chpInput.node | sptd.chpInput.operator | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb9"] | ChpInput || false || null + [sptd.chpTypeInput]| sptd.chpInput.node | sptd.chpInput.operator | ["type": "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8"] | ChpInput || true || new SystemParticipantTypedEntityData<>([:], clazz, operator, node, sptd.chpTypeInput) + } + + def "A CsvSystemParticipantSource should build hp input entity from valid and invalid input data as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, + participantsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), + Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + def sysPartTypedEntityData = new SystemParticipantTypedEntityData<>(fieldsToAttributes, HpInput, sptd.hpInput.operator, sptd.hpInput.node, sptd.hpTypeInput) + + when: + def hpInputEntityDataOpt = csvSystemParticipantSource.buildHpEntityData(sysPartTypedEntityData, thermalBuses) + + then: + hpInputEntityDataOpt.present == resultIsPresent + hpInputEntityDataOpt.ifPresent({ hpInputEntityData -> + assert (hpInputEntityData == resultData) + }) + + where: + thermalBuses | fieldsToAttributes || resultIsPresent || resultData + []| ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384e"] || false || null + [sptd.hpInput.thermalBus]| ["bla": "foo"] || false || null + [sptd.hpInput.thermalBus]| [:] || false || null + 
[sptd.hpInput.thermalBus]| ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384f"] || false || null + [sptd.hpInput.thermalBus]| ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384e"] || true || new HpInputEntityData([:], sptd.hpInput.operator, sptd.hpInput.node, sptd.hpTypeInput, sptd.hpInput.thermalBus) + } + + def "A CsvSystemParticipantSource should build chp input entity from valid and invalid input data as expected"(List thermalStorages, List thermalBuses, Map fieldsToAttributes, boolean resultIsPresent, ChpInputEntityData resultData) { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, + participantsFolderPath, fileNamingStrategy, Mock(CsvTypeSource), + Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + def sysPartTypedEntityData = new SystemParticipantTypedEntityData<>(fieldsToAttributes, ChpInput, sptd.chpInput.operator, sptd.chpInput.node, sptd.chpTypeInput) + + when: + def hpInputEntityDataOpt = csvSystemParticipantSource.buildChpEntityData(sysPartTypedEntityData, thermalStorages, thermalBuses) + + then: + hpInputEntityDataOpt.present == resultIsPresent + hpInputEntityDataOpt.ifPresent({ hpInputEntityData -> + assert (hpInputEntityData == resultData) + }) + + where: + thermalStorages | thermalBuses | fieldsToAttributes || resultIsPresent | resultData + [] as List | [] as List | ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384e", "thermalStorage": "8851813b-3a7d-4fee-874b-4df9d724e4b3"] || false | null + [ + sptd.chpInput.thermalStorage] as List | [sptd.chpInput.thermalBus] as List | ["bla": "foo"] || false | null + [ + sptd.chpInput.thermalStorage] as List | [sptd.chpInput.thermalBus] as List | [:] || false | null + [ + sptd.chpInput.thermalStorage] as List | [sptd.chpInput.thermalBus] as List | ["thermalBus": "0d95d7f2-49fb-4d49-8636-383a5220384e", "thermalStorage": "8851813b-3a7d-4fee-874b-4df9d724e4b3"] || true | new ChpInputEntityData([:], sptd.chpInput.operator, sptd.chpInput.node, sptd.chpTypeInput, sptd.chpInput.thermalBus, sptd.chpInput.thermalStorage) + } + + def "A CsvSystemParticipantSource should return data from a valid heat pump input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def heatPumps = csvSystemParticipantSource.getHeatPumps(nodes as Set, operators as Set, types as Set, thermalBuses as Set) + heatPumps.size() == resultingSize + heatPumps == resultingSet as Set + + where: + nodes | operators | types | thermalBuses || resultingSize || resultingSet + [sptd.hpInput.node]| [sptd.hpInput.operator]| [sptd.hpInput.type]| [sptd.hpInput.thermalBus]|| 1 || [sptd.hpInput] + [sptd.hpInput.node]| []| [sptd.hpInput.type]| [sptd.hpInput.thermalBus]|| 1 || [ + new HpInput(sptd.hpInput.uuid, sptd.hpInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.hpInput.operationTime, sptd.hpInput.node, sptd.hpInput.thermalBus, sptd.hpInput.qCharacteristics, sptd.hpInput.type) + ] + []| []| []| []|| 0 || [] + [sptd.hpInput.node]| []| []| []|| 0 || [] + [sptd.hpInput.node]| [sptd.hpInput.operator]| []| []|| 0 || [] + [sptd.hpInput.node]| [sptd.hpInput.operator]| [sptd.hpInput.type]| []|| 0 || [] + } + + def "A CsvSystemParticipantSource should return data from a valid chp input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), 
Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def chpUnits = csvSystemParticipantSource.getChpPlants(nodes as Set, operators as Set, types as Set, thermalBuses as Set, thermalStorages as Set) + chpUnits.size() == resultingSize + chpUnits == resultingSet as Set + + where: + nodes | operators | types | thermalBuses | thermalStorages || resultingSize || resultingSet + [sptd.chpInput.node]| [sptd.chpInput.operator]| [sptd.chpInput.type]| [sptd.chpInput.thermalBus]| [ + sptd.chpInput.thermalStorage] as List || 1 || [sptd.chpInput] + [sptd.chpInput.node]| []| [sptd.chpInput.type]| [sptd.chpInput.thermalBus]| [ + sptd.chpInput.thermalStorage] as List || 1 || [ + new ChpInput(sptd.chpInput.uuid, sptd.chpInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.chpInput.operationTime, sptd.chpInput.node, sptd.chpInput.thermalBus, sptd.chpInput.qCharacteristics, sptd.chpInput.type, sptd.chpInput.thermalStorage, sptd.chpInput.marketReaction) + ] + []| []| []| []| [] as List || 0 || [] + [sptd.chpInput.node]| []| []| []| [] as List || 0 || [] + [sptd.chpInput.node]| [sptd.chpInput.operator]| []| []| [] as List || 0 || [] + [sptd.chpInput.node]| [sptd.chpInput.operator]| [sptd.chpInput.type]| []| [] as List || 0 || [] + } + + def "A CsvSystemParticipantSource should return data from valid ev input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def sysParts = csvSystemParticipantSource.getEvs(nodes as Set, operators as Set, types as Set) + sysParts.size() == resultingSize + sysParts == resultingSet as Set + + where: + nodes | operators | types || resultingSize || resultingSet + [sptd.evInput.node]| [sptd.evInput.operator]| [sptd.evInput.type]|| 1 || [sptd.evInput] + [sptd.evInput.node]| []| [sptd.evInput.type]|| 1 || [ + new EvInput(sptd.evInput.uuid, sptd.evInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.evInput.operationTime, sptd.evInput.node, sptd.evInput.qCharacteristics, sptd.evInput.type) + ] + [sptd.evInput.node]| [sptd.evInput.operator]| []|| 0 || [] + [sptd.evInput.node]| []| []|| 0 || [] + []| []| []|| 0 || [] + } + + def "A CsvSystemParticipantSource should return data from valid wec input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def sysParts = csvSystemParticipantSource.getWecPlants(nodes as Set, operators as Set, types as Set) + sysParts.size() == resultingSize + sysParts == resultingSet as Set + + where: + nodes | operators | types || resultingSize || resultingSet + [sptd.wecInput.node]| [sptd.wecInput.operator]| [sptd.wecInput.type]|| 1 || [sptd.wecInput] + [sptd.wecInput.node]| []| [sptd.wecInput.type]|| 1 || [ + new WecInput(sptd.wecInput.uuid, sptd.wecInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.wecInput.operationTime, sptd.wecInput.node, sptd.wecInput.qCharacteristics, sptd.wecInput.type, sptd.wecInput.marketReaction) + ] + [sptd.wecInput.node]| [sptd.wecInput.operator]| []|| 0 || [] + [sptd.wecInput.node]| []| []|| 0 || [] + []| []| []|| 0 || [] + } + + def "A CsvSystemParticipantSource should return data from valid storage input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, 
Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def sysParts = csvSystemParticipantSource.getStorages(nodes as Set, operators as Set, types as Set) + sysParts.size() == resultingSize + sysParts == resultingSet as Set + + where: + nodes | operators | types || resultingSize || resultingSet + [sptd.storageInput.node]| [sptd.storageInput.operator]| [sptd.storageInput.type]|| 1 || [sptd.storageInput] + [sptd.storageInput.node]| []| [sptd.storageInput.type]|| 1 || [ + new StorageInput(sptd.storageInput.uuid, sptd.storageInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.storageInput.operationTime, sptd.storageInput.node, sptd.storageInput.qCharacteristics, sptd.storageInput.type, sptd.storageInput.behaviour.token) + ] + [sptd.storageInput.node]| [sptd.storageInput.operator]| []|| 0 || [] + [sptd.storageInput.node]| []| []|| 0 || [] + []| []| []|| 0 || [] + } + + def "A CsvSystemParticipantSource should return data from valid bm input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def sysParts = csvSystemParticipantSource.getBmPlants(nodes as Set, operators as Set, types as Set) + sysParts.size() == resultingSize + sysParts == resultingSet as Set + + where: + nodes | operators | types || resultingSize || resultingSet + [sptd.bmInput.node]| [sptd.bmInput.operator]| [sptd.bmInput.type]|| 1 || [sptd.bmInput] + [sptd.bmInput.node]| []| [sptd.bmInput.type]|| 1 || [ + new BmInput(sptd.bmInput.uuid, sptd.bmInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.bmInput.operationTime, sptd.bmInput.node, sptd.bmInput.qCharacteristics, sptd.bmInput.type, sptd.bmInput.marketReaction, sptd.bmInput.costControlled, sptd.bmInput.feedInTariff) + ] + [sptd.bmInput.node]| [sptd.bmInput.operator]| []|| 0 || [] + [sptd.bmInput.node]| []| []|| 0 || [] + []| []| []|| 0 || [] + } + + def "A CsvSystemParticipantSource should return data from valid ev charging station input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + when: + csvSystemParticipantSource.getEvCS() + + then: + NotImplementedException thrown = thrown(NotImplementedException) + thrown.message.startsWith("Ev Charging Stations are not implemented yet!") + } + + def "A CsvSystemParticipantSource should return data from valid load input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def sysParts = csvSystemParticipantSource.getLoads(nodes as Set, operators as Set) + sysParts.size() == resultingSize + sysParts == resultingSet as Set + + where: + nodes | operators || resultingSize || resultingSet + [sptd.loadInput.node]| [sptd.loadInput.operator]|| 1 || [sptd.loadInput] + [sptd.loadInput.node]| []|| 1 || [ + new LoadInput(sptd.loadInput.uuid, sptd.loadInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.loadInput.operationTime, sptd.loadInput.node, sptd.loadInput.qCharacteristics, sptd.loadInput.standardLoadProfile, sptd.loadInput.dsm, sptd.loadInput.eConsAnnual, sptd.loadInput.sRated, sptd.loadInput.cosphiRated) + ] + []| [sptd.loadInput.operator]|| 0 || [] + []| []|| 0 || [] + } + + def "A 
CsvSystemParticipantSource should return data from valid pv input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def sysParts = csvSystemParticipantSource.getPvPlants(nodes as Set, operators as Set) + sysParts.size() == resultingSize + sysParts == resultingSet as Set + + where: + nodes | operators || resultingSize || resultingSet + [sptd.pvInput.node]| [sptd.pvInput.operator]|| 1 || [sptd.pvInput] + [sptd.pvInput.node]| []|| 1 || [ + new PvInput(sptd.pvInput.uuid, sptd.pvInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.pvInput.operationTime, sptd.pvInput.node, sptd.pvInput.qCharacteristics, sptd.pvInput.albedo, sptd.pvInput.azimuth, sptd.pvInput.etaConv, sptd.pvInput.height, sptd.pvInput.kG, sptd.pvInput.kT, sptd.pvInput.marketReaction, sptd.pvInput.sRated, sptd.pvInput.cosphiRated) + ] + []| [sptd.pvInput.operator]|| 0 || [] + []| []|| 0 || [] + } + + def "A CsvSystemParticipantSource should return data from valid fixedFeedIn input file as expected"() { + given: + def csvSystemParticipantSource = new CsvSystemParticipantSource(csvSep, participantsFolderPath, + fileNamingStrategy, Mock(CsvTypeSource), Mock(CsvThermalSource), Mock(CsvRawGridSource)) + + expect: + def sysParts = csvSystemParticipantSource.getFixedFeedIns(nodes as Set, operators as Set) + sysParts.size() == resultingSize + sysParts == resultingSet as Set + + where: + nodes | operators || resultingSize || resultingSet + [sptd.fixedFeedInInput.node]| [ + sptd.fixedFeedInInput.operator] as List || 1 || [sptd.fixedFeedInInput] + [sptd.fixedFeedInInput.node]| [] as List || 1 || [ + new FixedFeedInInput(sptd.fixedFeedInInput.uuid, sptd.fixedFeedInInput.id, OperatorInput.NO_OPERATOR_ASSIGNED, sptd.fixedFeedInInput.operationTime, sptd.fixedFeedInInput.node, sptd.fixedFeedInInput.qCharacteristics, sptd.fixedFeedInInput.sRated, sptd.fixedFeedInInput.cosphiRated) + ] + []| [ + sptd.fixedFeedInInput.operator] as List || 0 || [] + []| [] as List || 0 || [] + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTestDataMeta.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTestDataMeta.groovy new file mode 100644 index 000000000..228427762 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTestDataMeta.groovy @@ -0,0 +1,26 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.io.FileNamingStrategy + +/** + * Provides the base folder structure, the csv separator and the file naming strategy for the csv test files used by the csv source tests. + * + * @version 0.1 + * @since 13.04.20 + */ +trait CsvTestDataMeta { + + String testBaseFolderPath = new File(getClass().getResource('/testGridFiles').toURI()).absolutePath + String graphicsFolderPath = testBaseFolderPath.concat(File.separator).concat("graphics") + String typeFolderPath = testBaseFolderPath.concat(File.separator).concat("types") + String gridFolderPath = testBaseFolderPath.concat(File.separator).concat("grid") + String participantsFolderPath = testBaseFolderPath.concat(File.separator).concat("participants") + String thermalFolderPath = testBaseFolderPath.concat(File.separator).concat("thermal") + + String csvSep = "," + FileNamingStrategy fileNamingStrategy = new FileNamingStrategy() +} \ No newline at end of file diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvThermalSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvThermalSourceTest.groovy new file mode 100644 index 000000000..e95880321 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvThermalSourceTest.groovy @@ -0,0 +1,174 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.io.FileNamingStrategy +import edu.ie3.datamodel.io.factory.input.AssetInputEntityData +import edu.ie3.datamodel.io.factory.input.ThermalUnitInputEntityData +import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.models.input.thermal.ThermalBusInput +import edu.ie3.datamodel.models.input.thermal.ThermalUnitInput +import edu.ie3.test.common.SystemParticipantTestData as sptd +import edu.ie3.test.common.ThermalUnitInputTestData +import spock.lang.Specification + +import java.util.stream.Collectors + +class CsvThermalSourceTest extends Specification implements CsvTestDataMeta { + + def "A CsvThermalSource should return ThermalBuses from valid and invalid input data as expected"() { + given: + def csvTypeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def csvThermalSource = new CsvThermalSource(csvSep, thermalFolderPath, fileNamingStrategy, csvTypeSource) + def operators = csvTypeSource.operators + + //test the method when no operators are provided as method arguments + when: + def resultingThermalBusesWoOperator = csvThermalSource.getThermalBuses() + + then: + resultingThermalBusesWoOperator.size() == 1 + resultingThermalBusesWoOperator.first().uuid == sptd.thermalBus.uuid + resultingThermalBusesWoOperator.first().id == sptd.thermalBus.id + resultingThermalBusesWoOperator.first().operator == sptd.thermalBus.operator + resultingThermalBusesWoOperator.first().operationTime == sptd.thermalBus.operationTime + + //test the method when operators are provided as method arguments + when: + def resultingThermalBuses = csvThermalSource.getThermalBuses(operators) + + then: + resultingThermalBuses.size() == 1 + resultingThermalBuses.first().uuid == sptd.thermalBus.uuid + resultingThermalBuses.first().id == sptd.thermalBus.id + resultingThermalBuses.first().operator == sptd.thermalBus.operator + resultingThermalBuses.first().operationTime == sptd.thermalBus.operationTime + } + +
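+ // The remaining tests exercise both accessor flavours offered by the CsvThermalSource: the parameterless getters resolve the required operators and thermal buses from the csv files themselves, while the overloads re-use collections that have already been fetched (e.g. the operators provided by the CsvTypeSource).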
def "A CsvThermalSource should return a CylindricalStorageInput from valid and invalid input data as expected"() { + given: + def csvTypeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def csvThermalSource = new CsvThermalSource(csvSep, thermalFolderPath, fileNamingStrategy, csvTypeSource) + def operators = csvTypeSource.operators + def thermalBuses = csvThermalSource.thermalBuses + + //test method when operators and thermal buses are not provided as constructor parameters + when: + def resultingCylindricalStorageWoOperator = csvThermalSource.getCylindricStorages() + + then: + resultingCylindricalStorageWoOperator.size() == 1 + resultingCylindricalStorageWoOperator.first().uuid == sptd.thermalStorage.uuid + resultingCylindricalStorageWoOperator.first().id == sptd.thermalStorage.id + resultingCylindricalStorageWoOperator.first().operator == sptd.thermalStorage.operator + resultingCylindricalStorageWoOperator.first().operationTime == sptd.thermalStorage.operationTime + resultingCylindricalStorageWoOperator.first().thermalBus == sptd.thermalStorage.thermalBus + resultingCylindricalStorageWoOperator.first().storageVolumeLvl == sptd.storageVolumeLvl + resultingCylindricalStorageWoOperator.first().storageVolumeLvlMin == sptd.storageVolumeLvlMin + resultingCylindricalStorageWoOperator.first().inletTemp == sptd.inletTemp + resultingCylindricalStorageWoOperator.first().returnTemp == sptd.returnTemp + resultingCylindricalStorageWoOperator.first().c == sptd.c + + //test method when operators and thermal buses are provided as constructor parameters + when: + def resultingCylindricalStorage = csvThermalSource.getCylindricStorages(operators, thermalBuses) + + then: + resultingCylindricalStorage.size() == 1 + resultingCylindricalStorage.first().uuid == sptd.thermalStorage.uuid + resultingCylindricalStorage.first().id == sptd.thermalStorage.id + resultingCylindricalStorage.first().operator == sptd.thermalStorage.operator + resultingCylindricalStorage.first().operationTime == sptd.thermalStorage.operationTime + resultingCylindricalStorage.first().thermalBus == sptd.thermalStorage.thermalBus + resultingCylindricalStorage.first().storageVolumeLvl == sptd.storageVolumeLvl + resultingCylindricalStorage.first().storageVolumeLvlMin == sptd.storageVolumeLvlMin + resultingCylindricalStorage.first().inletTemp == sptd.inletTemp + resultingCylindricalStorage.first().returnTemp == sptd.returnTemp + resultingCylindricalStorage.first().c == sptd.c + + } + + def "A CsvThermalSource should build thermal unit input entity from valid and invalid input data as expected"() { + given: + def csvTypeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def csvThermalSource = new CsvThermalSource(csvSep, thermalFolderPath, fileNamingStrategy, csvTypeSource) + def operator = new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "testOperator") + def validFieldsToAttributes = [ + "uuid" : "717af017-cc69-406f-b452-e022d7fb516a", + "id" : "test_thermal_unit", + "operator" : "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom" : "2020-03-24 15:11:31", + "operatesUntil" : "2020-03-25 15:11:31", + "thermalBus" : "0d95d7f2-49fb-4d49-8636-383a5220384e" + ] + def assetInputEntityData = new AssetInputEntityData(validFieldsToAttributes, ThermalUnitInput, operator) + + when: + def resultingDataOpt = csvThermalSource.buildThermalUnitInputEntityData(assetInputEntityData, thermalBuses).collect(Collectors.toList()) + + then: + resultingDataOpt.size() == 1 + 
resultingDataOpt.first().isPresent() == resultIsPresent + resultingDataOpt.first().ifPresent({ resultingData -> + assert (resultingData == expectedThermalUnitInputEntityData) + }) + + where: + thermalBuses || resultIsPresent || expectedThermalUnitInputEntityData + []|| false || null // thermal buses are not present -> method should return an empty optional -> do not check for thermal unit entity data + [ + new ThermalBusInput(UUID.fromString("0d95d7f2-49fb-4d49-8636-383a5220384e"), "test_thermal_bus") + ]|| true || + new ThermalUnitInputEntityData(["uuid": "717af017-cc69-406f-b452-e022d7fb516a", + "id": "test_thermal_unit", + "operator": "8f9682df-0744-4b58-a122-f0dc730f6510", + "operatesFrom": "2020-03-24 15:11:31", + "operatesUntil": "2020-03-25 15:11:31"], + ThermalUnitInput, + new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "testOperator"), + new ThermalBusInput(UUID.fromString("0d95d7f2-49fb-4d49-8636-383a5220384e"), "test_thermal_bus")) + + } + + def "A CsvThermalSource should return a ThermalHouseInput from valid and invalid input data as expected"() { + given: + def csvTypeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + def csvThermalSource = new CsvThermalSource(csvSep, thermalFolderPath, fileNamingStrategy, csvTypeSource) + def operators = csvTypeSource.operators + def thermalBuses = csvThermalSource.thermalBuses + + //test the method when operators and thermal buses are not provided as method arguments + when: + def resultingThermalHouseWoOperator = csvThermalSource.getThermalHouses() + + then: + resultingThermalHouseWoOperator.size() == 1 + resultingThermalHouseWoOperator.first().uuid == ThermalUnitInputTestData.thermalHouseInput.uuid + resultingThermalHouseWoOperator.first().id == ThermalUnitInputTestData.thermalHouseInput.id + resultingThermalHouseWoOperator.first().operator == ThermalUnitInputTestData.thermalHouseInput.operator + resultingThermalHouseWoOperator.first().operationTime.isLimited() + resultingThermalHouseWoOperator.first().operationTime == ThermalUnitInputTestData.thermalHouseInput.operationTime + resultingThermalHouseWoOperator.first().thermalBus == ThermalUnitInputTestData.thermalHouseInput.thermalBus + resultingThermalHouseWoOperator.first().ethLosses == ThermalUnitInputTestData.thermalHouseInput.ethLosses + resultingThermalHouseWoOperator.first().ethCapa == ThermalUnitInputTestData.thermalHouseInput.ethCapa + + //test the method when operators and thermal buses are provided as method arguments + when: + def resultingThermalHouse = csvThermalSource.getThermalHouses(operators, thermalBuses) + + then: + resultingThermalHouse.size() == 1 + resultingThermalHouse.first().uuid == ThermalUnitInputTestData.thermalHouseInput.uuid + resultingThermalHouse.first().id == ThermalUnitInputTestData.thermalHouseInput.id + resultingThermalHouse.first().operator == ThermalUnitInputTestData.thermalHouseInput.operator + resultingThermalHouse.first().operationTime.isLimited() + resultingThermalHouse.first().operationTime == ThermalUnitInputTestData.thermalHouseInput.operationTime + resultingThermalHouse.first().thermalBus == ThermalUnitInputTestData.thermalHouseInput.thermalBus + resultingThermalHouse.first().ethLosses == ThermalUnitInputTestData.thermalHouseInput.ethLosses + resultingThermalHouse.first().ethCapa == ThermalUnitInputTestData.thermalHouseInput.ethCapa + + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTypeSourceTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTypeSourceTest.groovy new file mode 100644 index 000000000..01d251073 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTypeSourceTest.groovy @@ -0,0 +1,208 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.io.FileNamingStrategy +import edu.ie3.datamodel.models.input.OperatorInput +import spock.lang.Specification +import edu.ie3.test.common.GridTestData as gtd +import edu.ie3.test.common.SystemParticipantTestData as sptd + + +class CsvTypeSourceTest extends Specification implements CsvTestDataMeta { + + def "A CsvTypeSource should read and handle valid 2W Transformer type file as expected"() { + given: + def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + + expect: + def transformer2WTypes = typeSource.transformer2WTypes + def transformerToBeFound = transformer2WTypes.find {trafoType -> + trafoType.uuid ==gtd.transformerTypeBtoD.uuid + } + transformerToBeFound.id == gtd.transformerTypeBtoD.id + transformerToBeFound.rSc == gtd.transformerTypeBtoD.rSc + transformerToBeFound.xSc == gtd.transformerTypeBtoD.xSc + transformerToBeFound.sRated == gtd.transformerTypeBtoD.sRated + transformerToBeFound.vRatedA == gtd.transformerTypeBtoD.vRatedA + transformerToBeFound.vRatedB == gtd.transformerTypeBtoD.vRatedB + transformerToBeFound.gM == gtd.transformerTypeBtoD.gM + transformerToBeFound.bM == gtd.transformerTypeBtoD.bM + transformerToBeFound.dV == gtd.transformerTypeBtoD.dV + transformerToBeFound.dPhi == gtd.transformerTypeBtoD.dPhi + transformerToBeFound.tapSide == gtd.transformerTypeBtoD.tapSide + transformerToBeFound.tapNeutr == gtd.transformerTypeBtoD.tapNeutr + transformerToBeFound.tapMin == gtd.transformerTypeBtoD.tapMin + transformerToBeFound.tapMax == gtd.transformerTypeBtoD.tapMax + } + + def "A CsvTypeSource should read and handle valid operator file as expected"() { + given: + def firstOperator = new OperatorInput( + UUID.fromString("f15105c4-a2de-4ab8-a621-4bc98e372d92"), "Univ.-Prof. Dr. rer. hort. 
Klaus-Dieter Brokkoli") + def secondOperator = new OperatorInput( + UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator") + def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + + expect: + def operators = typeSource.operators + operators.first().uuid == firstOperator.uuid + operators.first().id == firstOperator.id + operators[1].uuid == secondOperator.uuid + operators[1].id == secondOperator.id + } + + def "A CsvTypeSource should read and handle valid line type file as expected"() { + given: + def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + + expect: + def lineTypes = typeSource.lineTypes + lineTypes.first().uuid == gtd.lineTypeInputCtoD.uuid + lineTypes.first().id == gtd.lineTypeInputCtoD.id + lineTypes.first().b == gtd.lineTypeInputCtoD.b + lineTypes.first().g == gtd.lineTypeInputCtoD.g + lineTypes.first().r == gtd.lineTypeInputCtoD.r + lineTypes.first().x == gtd.lineTypeInputCtoD.x + lineTypes.first().iMax == gtd.lineTypeInputCtoD.iMax + lineTypes.first().vRated == gtd.lineTypeInputCtoD.vRated + } + + def "A CsvTypeSource should read and handle valid 3W Transformer type file as expected"() { + given: + def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + + expect: + def transformer3WTypes = typeSource.transformer3WTypes + transformer3WTypes.first().uuid == gtd.transformerTypeAtoBtoC.uuid + transformer3WTypes.first().id == gtd.transformerTypeAtoBtoC.id + transformer3WTypes.first().sRatedA == gtd.transformerTypeAtoBtoC.sRatedA + transformer3WTypes.first().sRatedB == gtd.transformerTypeAtoBtoC.sRatedB + transformer3WTypes.first().sRatedC == gtd.transformerTypeAtoBtoC.sRatedC + transformer3WTypes.first().vRatedA == gtd.transformerTypeAtoBtoC.vRatedA + transformer3WTypes.first().vRatedB == gtd.transformerTypeAtoBtoC.vRatedB + transformer3WTypes.first().vRatedC == gtd.transformerTypeAtoBtoC.vRatedC + transformer3WTypes.first().rScA == gtd.transformerTypeAtoBtoC.rScA + transformer3WTypes.first().rScB == gtd.transformerTypeAtoBtoC.rScB + transformer3WTypes.first().rScC == gtd.transformerTypeAtoBtoC.rScC + transformer3WTypes.first().xScA == gtd.transformerTypeAtoBtoC.xScA + transformer3WTypes.first().xScB == gtd.transformerTypeAtoBtoC.xScB + transformer3WTypes.first().xScC == gtd.transformerTypeAtoBtoC.xScC + transformer3WTypes.first().gM == gtd.transformerTypeAtoBtoC.gM + transformer3WTypes.first().bM == gtd.transformerTypeAtoBtoC.bM + transformer3WTypes.first().dV == gtd.transformerTypeAtoBtoC.dV + transformer3WTypes.first().dPhi == gtd.transformerTypeAtoBtoC.dPhi + transformer3WTypes.first().tapNeutr == gtd.transformerTypeAtoBtoC.tapNeutr + transformer3WTypes.first().tapMin == gtd.transformerTypeAtoBtoC.tapMin + transformer3WTypes.first().tapMax == gtd.transformerTypeAtoBtoC.tapMax + } + + def "A CsvTypeSource should read and handle valid bm type file as expected"() { + given: + def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy()) + + expect: + def bmTypes = typeSource.bmTypes + bmTypes.first().uuid == sptd.bmTypeInput.uuid + bmTypes.first().id == sptd.bmTypeInput.id + bmTypes.first().capex == sptd.bmTypeInput.capex + bmTypes.first().opex == sptd.bmTypeInput.opex + bmTypes.first().cosphiRated == sptd.bmTypeInput.cosphiRated + bmTypes.first().activePowerGradient == sptd.bmTypeInput.activePowerGradient + bmTypes.first().etaConv == sptd.bmTypeInput.etaConv + } + + def "A CsvTypeSource should read and handle valid chp type file as expected"() { + given: 
+ def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy())
+
+ expect:
+ def chpTypes = typeSource.chpTypes
+ chpTypes.first().uuid == sptd.chpTypeInput.uuid
+ chpTypes.first().id == sptd.chpTypeInput.id
+ chpTypes.first().capex == sptd.chpTypeInput.capex
+ chpTypes.first().opex == sptd.chpTypeInput.opex
+ chpTypes.first().etaEl == sptd.chpTypeInput.etaEl
+ chpTypes.first().etaThermal == sptd.chpTypeInput.etaThermal
+ chpTypes.first().sRated == sptd.chpTypeInput.sRated
+ chpTypes.first().pThermal == sptd.chpTypeInput.pThermal
+ chpTypes.first().pOwn == sptd.chpTypeInput.pOwn
+ }
+
+ def "A CsvTypeSource should read and handle valid hp type file as expected"() {
+ given:
+ def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy())
+
+ expect:
+ def hpTypes = typeSource.hpTypes
+ hpTypes.first().uuid == sptd.hpTypeInput.uuid
+ hpTypes.first().id == sptd.hpTypeInput.id
+ hpTypes.first().capex == sptd.hpTypeInput.capex
+ hpTypes.first().opex == sptd.hpTypeInput.opex
+ hpTypes.first().sRated == sptd.hpTypeInput.sRated
+ hpTypes.first().cosphiRated == sptd.hpTypeInput.cosphiRated
+ hpTypes.first().pThermal == sptd.hpTypeInput.pThermal
+ }
+
+ def "A CsvTypeSource should read and handle valid storage type file as expected"() {
+ given:
+ def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy())
+
+ expect:
+ def storageTypes = typeSource.storageTypes
+ storageTypes.first().uuid == sptd.storageTypeInput.uuid
+ storageTypes.first().id == sptd.storageTypeInput.id
+ storageTypes.first().capex == sptd.storageTypeInput.capex
+ storageTypes.first().opex == sptd.storageTypeInput.opex
+ storageTypes.first().eStorage == sptd.storageTypeInput.eStorage
+ storageTypes.first().sRated == sptd.storageTypeInput.sRated
+ storageTypes.first().cosphiRated == sptd.storageTypeInput.cosphiRated
+ storageTypes.first().pMax == sptd.storageTypeInput.pMax
+ storageTypes.first().activePowerGradient == sptd.storageTypeInput.activePowerGradient
+ storageTypes.first().eta == sptd.storageTypeInput.eta
+ storageTypes.first().dod == sptd.storageTypeInput.dod
+ storageTypes.first().lifeTime == sptd.storageTypeInput.lifeTime
+ storageTypes.first().lifeCycle == sptd.storageTypeInput.lifeCycle
+ }
+
+ def "A CsvTypeSource should read and handle valid wec type file as expected"() {
+ given:
+ def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy())
+
+ expect:
+ def wecTypes = typeSource.wecTypes
+ wecTypes.first().uuid == sptd.wecType.uuid
+ wecTypes.first().id == sptd.wecType.id
+ wecTypes.first().capex == sptd.wecType.capex
+ wecTypes.first().opex == sptd.wecType.opex
+ wecTypes.first().cosphiRated == sptd.wecType.cosphiRated
+ wecTypes.first().etaConv == sptd.wecType.etaConv
+ wecTypes.first().sRated == sptd.wecType.sRated
+ wecTypes.first().rotorArea == sptd.wecType.rotorArea
+ wecTypes.first().hubHeight == sptd.wecType.hubHeight
+ wecTypes.first().cpCharacteristic == sptd.wecType.cpCharacteristic
+ //check the individual points (explicit assert needed: a condition nested inside an if is not an implicit Spock assertion)
+ if (wecTypes.first().cpCharacteristic.points.iterator().hasNext())
+ assert wecTypes.first().cpCharacteristic.points.iterator().next() == sptd.wecType.cpCharacteristic.points.iterator().next()
+
+ }
+
+ def "A CsvTypeSource should read and handle valid ev type file as expected"() {
+ given:
+ def typeSource = new CsvTypeSource(",", typeFolderPath, new FileNamingStrategy())
+
+ expect:
+ def evTypes = typeSource.evTypes
+ evTypes.first().uuid == sptd.evTypeInput.uuid
+ evTypes.first().id == sptd.evTypeInput.id +
evTypes.first().capex == sptd.evTypeInput.capex + evTypes.first().opex == sptd.evTypeInput.opex + evTypes.first().eStorage == sptd.evTypeInput.eStorage + evTypes.first().eCons == sptd.evTypeInput.eCons + evTypes.first().sRated == sptd.evTypeInput.sRated + evTypes.first().cosphiRated == sptd.evTypeInput.cosphiRated + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilsTest.groovy similarity index 76% rename from src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilTest.groovy rename to src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilsTest.groovy index bb3578998..11db853fd 100644 --- a/src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/utils/ContainerUtilsTest.groovy @@ -5,6 +5,8 @@ */ package edu.ie3.datamodel.utils +import static edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils.* +import static edu.ie3.util.quantities.PowerSystemUnits.PU import edu.ie3.datamodel.exceptions.InvalidGridException import edu.ie3.datamodel.graph.SubGridTopologyGraph import edu.ie3.datamodel.models.OperationTime @@ -23,15 +25,12 @@ import tec.uom.se.quantity.Quantities import java.time.ZoneId -import static edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils.* import edu.ie3.datamodel.models.voltagelevels.VoltageLevel import edu.ie3.test.common.ComplexTopology import spock.lang.Shared import spock.lang.Specification -import static edu.ie3.util.quantities.PowerSystemUnits.PU - -class ContainerUtilTest extends Specification { +class ContainerUtilsTest extends Specification { static { TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") } @@ -39,54 +38,54 @@ class ContainerUtilTest extends Specification { @Shared GridContainer complexTopology = ComplexTopology.grid - def "The container utils filter raw grid elements correctly for a given subnet" () { + def "The container utils filter raw grid elements correctly for a given subnet"() { when: - RawGridElements actual = ContainerUtils.filterForSubnet(complexTopology.getRawGrid(), subnet) + RawGridElements actual = ContainerUtils.filterForSubnet(complexTopology.rawGrid, subnet) then: - actual.getNodes() == expectedNodes - actual.getTransformer2Ws() == expectedTransformers2W - actual.getTransformer3Ws() == expectedTransformers3W + actual.nodes == expectedNodes + actual.transformer2Ws == expectedTransformers2W + actual.transformer3Ws == expectedTransformers3W /* TODO: Add lines, switches etc. 
to testing data */ where: - subnet || expectedNodes || expectedTransformers2W || expectedTransformers3W - 1 || [ + subnet || expectedNodes || expectedTransformers2W || expectedTransformers3W + 1 || [ ComplexTopology.nodeA, ComplexTopology.nodeB, - ComplexTopology.nodeC] as Set || [] as Set || [ + ComplexTopology.nodeC] as Set || [] as Set || [ ComplexTopology.transformerAtoBtoC] as Set - 2 || [ + 2 || [ ComplexTopology.nodeA, ComplexTopology.nodeB, - ComplexTopology.nodeC] as Set || [] as Set || [ + ComplexTopology.nodeC] as Set || [] as Set || [ ComplexTopology.transformerAtoBtoC] as Set - 3 || [ + 3 || [ ComplexTopology.nodeA, ComplexTopology.nodeB, - ComplexTopology.nodeC] as Set || [] as Set || [ + ComplexTopology.nodeC] as Set || [] as Set || [ ComplexTopology.transformerAtoBtoC] as Set - 4 || [ + 4 || [ ComplexTopology.nodeB, - ComplexTopology.nodeD] as Set || [ - ComplexTopology.transformerBtoD] as Set || [] as Set - 5 || [ + ComplexTopology.nodeD] as Set || [ + ComplexTopology.transformerBtoD] as Set || [] as Set + 5 || [ ComplexTopology.nodeB, ComplexTopology.nodeC, ComplexTopology.nodeE] as Set || [ ComplexTopology.transformerBtoE, - ComplexTopology.transformerCtoE] as Set || [] as Set - 6 || [ + ComplexTopology.transformerCtoE] as Set || [] as Set + 6 || [ ComplexTopology.nodeC, ComplexTopology.nodeF, ComplexTopology.nodeG] as Set || [ ComplexTopology.transformerCtoF, - ComplexTopology.transformerCtoG] as Set || [] as Set + ComplexTopology.transformerCtoG] as Set || [] as Set } - def "The container utils are able to derive the predominant voltage level" () { + def "The container utils are able to derive the predominant voltage level"() { given: - RawGridElements rawGrid = ContainerUtils.filterForSubnet(complexTopology.getRawGrid(), subnet) + RawGridElements rawGrid = ContainerUtils.filterForSubnet(complexTopology.rawGrid, subnet) when: VoltageLevel actual = ContainerUtils.determinePredominantVoltLvl(rawGrid, subnet) @@ -95,18 +94,18 @@ class ContainerUtilTest extends Specification { actual == expected where: - subnet || expected - 1 || EHV_380KV - 2 || HV - 3 || MV_20KV - 4 || MV_20KV - 5 || MV_10KV - 6 || LV + subnet || expected + 1 || EHV_380KV + 2 || HV + 3 || MV_20KV + 4 || MV_20KV + 5 || MV_10KV + 6 || LV } - def "The container utils throw an exception, when there is an ambiguous voltage level in the grid" () { + def "The container utils throw an exception, when there is an ambiguous voltage level in the grid"() { given: - RawGridElements rawGrid = ContainerUtils.filterForSubnet(complexTopology.getRawGrid(), 4) + RawGridElements rawGrid = ContainerUtils.filterForSubnet(complexTopology.rawGrid, 4) NodeInput corruptNode = new NodeInput( UUID.randomUUID(), "node_e", OperatorInput.NO_OPERATOR_ASSIGNED, @@ -136,15 +135,15 @@ class ContainerUtilTest extends Specification { "ms_20kv, mv, mv_20kV], voltageRange=Interval [20.0 kV, 30.0 kV)}" } - def "The container util determines the set of subnet number correctly" () { + def "The container util determines the set of subnet number correctly"() { expect: - ContainerUtils.determineSubnetNumbers(ComplexTopology.grid.getRawGrid().getNodes()) == [1, 2, 3, 4, 5, 6] as Set + ContainerUtils.determineSubnetNumbers(ComplexTopology.grid.rawGrid.nodes) == [1, 2, 3, 4, 5, 6] as Set } - def "The container util builds the sub grid containers correctly" () { + def "The container util builds the sub grid containers correctly"() { given: - String gridName = ComplexTopology.grid.getGridName() - Set subNetNumbers = 
ContainerUtils.determineSubnetNumbers(ComplexTopology.grid.getRawGrid().getNodes()) + String gridName = ComplexTopology.grid.gridName + Set subNetNumbers = ContainerUtils.determineSubnetNumbers(ComplexTopology.grid.rawGrid.nodes) RawGridElements rawGrid = ComplexTopology.grid.rawGrid SystemParticipants systemParticipants = ComplexTopology.grid.systemParticipants GraphicElements graphics = ComplexTopology.grid.graphics @@ -160,19 +159,19 @@ class ContainerUtilTest extends Specification { then: actual.size() == 6 - for(Map.Entry entry: actual){ - int subnetNo = entry.getKey() - SubGridContainer actualSubGrid = entry.getValue() + for (Map.Entry entry : actual) { + int subnetNo = entry.key + SubGridContainer actualSubGrid = entry.value SubGridContainer expectedSubGrid = expectedSubGrids.get(subnetNo) assert actualSubGrid == expectedSubGrid } } - def "The container util builds the correct sub grid dependency graph" () { + def "The container util builds the correct sub grid dependency graph"() { given: - String gridName = ComplexTopology.grid.getGridName() - Set subNetNumbers = ContainerUtils.determineSubnetNumbers(ComplexTopology.grid.getRawGrid().getNodes()) + String gridName = ComplexTopology.grid.gridName + Set subNetNumbers = ContainerUtils.determineSubnetNumbers(ComplexTopology.grid.rawGrid.nodes) RawGridElements rawGrid = ComplexTopology.grid.rawGrid SystemParticipants systemParticipants = ComplexTopology.grid.systemParticipants GraphicElements graphics = ComplexTopology.grid.graphics @@ -182,8 +181,8 @@ class ContainerUtilTest extends Specification { rawGrid, systemParticipants, graphics) - Set transformer2ws = ComplexTopology.grid.rawGrid.getTransformer2Ws() - Set transformer3ws = ComplexTopology.grid.rawGrid.getTransformer3Ws() + Set transformer2ws = ComplexTopology.grid.rawGrid.transformer2Ws + Set transformer3ws = ComplexTopology.grid.rawGrid.transformer3Ws SubGridTopologyGraph expectedSubGridTopology = ComplexTopology.expectedSubGridTopology when: @@ -196,7 +195,7 @@ class ContainerUtilTest extends Specification { actual == expectedSubGridTopology } - def "The container util builds the correct assembly of sub grids from basic information" () { + def "The container util builds the correct assembly of sub grids from basic information"() { given: String gridName = ComplexTopology.gridName RawGridElements rawGrid = ComplexTopology.grid.rawGrid @@ -215,7 +214,7 @@ class ContainerUtilTest extends Specification { actual == expectedSubGridTopology } - def "The container utils build a joint model correctly from sub grids" () { + def "The container utils build a joint model correctly from sub grids"() { given: Collection subGridContainers = ComplexTopology.expectedSubGrids.values() JointGridContainer expected = ComplexTopology.grid diff --git a/src/test/groovy/edu/ie3/datamodel/utils/ValidationUtilsTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/ValidationUtilsTest.groovy new file mode 100644 index 000000000..89574299b --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/utils/ValidationUtilsTest.groovy @@ -0,0 +1,83 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.utils + +import static edu.ie3.util.quantities.PowerSystemUnits.PU +import edu.ie3.datamodel.models.OperationTime +import edu.ie3.datamodel.models.input.NodeInput +import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils +import edu.ie3.test.common.GridTestData +import edu.ie3.util.TimeTools +import spock.lang.Specification +import tec.uom.se.quantity.Quantities + +import java.time.ZoneId + +class ValidationUtilsTest extends Specification { + + static { + TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") + } + + def "The validation utils should determine if a collection with UniqueEntity's is distinct by their uuid"() { + + expect: + ValidationUtils.distinctUuids(collection) == distinct + + where: + collection || distinct + [ + GridTestData.nodeF, + new NodeInput( + UUID.fromString("9e37ce48-9650-44ec-b888-c2fd182aff01"), "node_g", OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited() + , + Quantities.getQuantity(1d, PU), + false, + null, + GermanVoltageLevelUtils.LV, + 6)] as Set || false + [ + GridTestData.nodeD, + GridTestData.nodeE] as Set || true + [] as Set || true + } + + def "The validation utils should check for duplicates as expected"() { + + expect: + ValidationUtils.checkForDuplicateUuids(collection) == checkResult + + where: + collection || checkResult + [ + new NodeInput( + UUID.fromString("9e37ce48-9650-44ec-b888-c2fd182aff01"), "node_f", GridTestData.profBroccoli, + OperationTime.notLimited() + , + Quantities.getQuantity(1d, PU), + false, + null, + GermanVoltageLevelUtils.LV, + 6), + new NodeInput( + UUID.fromString("9e37ce48-9650-44ec-b888-c2fd182aff01"), "node_g", GridTestData.profBroccoli, + OperationTime.notLimited() + , + Quantities.getQuantity(1d, PU), + false, + null, + GermanVoltageLevelUtils.LV, + 6)] as Set || Optional.of("9e37ce48-9650-44ec-b888-c2fd182aff01: 2\n" + + " - NodeInput{uuid=9e37ce48-9650-44ec-b888-c2fd182aff01, id='node_f', operator=OperatorInput{uuid=f15105c4-a2de-4ab8-a621-4bc98e372d92, id='Univ.-Prof. Dr. rer. hort. Klaus-Dieter Brokkoli'}, operationTime=OperationTime{startDate=null, endDate=null, isLimited=false}, vTarget=1.0 PU, slack=false, geoPosition=null, voltLvl=CommonVoltageLevel{id='Niederspannung', nominalVoltage=0.4 kV, synonymousIds=[Niederspannung, lv, ns], voltageRange=Interval [0.0 kV, 10.0 kV)}, subnet=6}\n" + + " - NodeInput{uuid=9e37ce48-9650-44ec-b888-c2fd182aff01, id='node_g', operator=OperatorInput{uuid=f15105c4-a2de-4ab8-a621-4bc98e372d92, id='Univ.-Prof. Dr. rer. hort. 
Klaus-Dieter Brokkoli'}, operationTime=OperationTime{startDate=null, endDate=null, isLimited=false}, vTarget=1.0 PU, slack=false, geoPosition=null, voltLvl=CommonVoltageLevel{id='Niederspannung', nominalVoltage=0.4 kV, synonymousIds=[Niederspannung, lv, ns], voltageRange=Interval [0.0 kV, 10.0 kV)}, subnet=6}") + [ + GridTestData.nodeD, + GridTestData.nodeE] as Set || Optional.empty() + [] as Set || Optional.empty() + } +} diff --git a/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy b/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy index ea30b7ac9..77361fe99 100644 --- a/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy +++ b/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy @@ -51,6 +51,7 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set, [] as Set, + [] as Set, [] as Set), new GraphicElements( [] as Set, @@ -80,6 +81,7 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set, [] as Set, + [] as Set, [] as Set), new GraphicElements( [] as Set, @@ -105,6 +107,7 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set, [] as Set, + [] as Set, [] as Set), new GraphicElements( [] as Set, @@ -130,6 +133,7 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set, [] as Set, + [] as Set, [] as Set), new GraphicElements( [] as Set, @@ -155,6 +159,7 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set, [] as Set, + [] as Set, [] as Set), new GraphicElements( [] as Set, @@ -182,6 +187,7 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set, [] as Set, + [] as Set, [] as Set), new GraphicElements( [] as Set, @@ -209,6 +215,7 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set, [] as Set, + [] as Set, [] as Set), new GraphicElements( [] as Set, @@ -219,7 +226,7 @@ class ComplexTopology extends GridTestData { DirectedMultigraph mutableGraph = new DirectedMultigraph<>(SubGridGate.class) /* Add all edges */ - expectedSubGrids.values().forEach({subGrid -> mutableGraph.addVertex(subGrid)}) + expectedSubGrids.values().forEach({ subGrid -> mutableGraph.addVertex(subGrid) }) mutableGraph.addEdge(expectedSubGrids.get(1), expectedSubGrids.get(2), new SubGridGate(transformerAtoBtoC, ConnectorPort.B)) mutableGraph.addEdge(expectedSubGrids.get(1), expectedSubGrids.get(3), new SubGridGate(transformerAtoBtoC, ConnectorPort.C)) diff --git a/src/test/groovy/edu/ie3/test/common/GridTestData.groovy b/src/test/groovy/edu/ie3/test/common/GridTestData.groovy index 02f6b3ae4..bb3c4f23f 100644 --- a/src/test/groovy/edu/ie3/test/common/GridTestData.groovy +++ b/src/test/groovy/edu/ie3/test/common/GridTestData.groovy @@ -20,7 +20,7 @@ import edu.ie3.datamodel.models.input.graphics.LineGraphicInput import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput import edu.ie3.datamodel.models.input.system.characteristic.OlmCharacteristicInput import edu.ie3.datamodel.models.voltagelevels.GermanVoltageLevelUtils -import edu.ie3.util.TimeTools +import edu.ie3.util.TimeUtil import edu.ie3.util.quantities.PowerSystemUnits import org.locationtech.jts.geom.LineString import org.locationtech.jts.geom.Point @@ -46,6 +46,15 @@ class GridTestData { private static final GeoJsonReader geoJsonReader = new GeoJsonReader() + public static final OperationTime defaultOperationTime = OperationTime.builder(). + withStart(TimeUtil.withDefaults.toZonedDateTime("2020-03-24 15:11:31")). 
+ withEnd(TimeUtil.withDefaults.toZonedDateTime("2020-03-25 15:11:31")).build() + + public static final OperatorInput profBroccoli = new OperatorInput( + UUID.fromString("f15105c4-a2de-4ab8-a621-4bc98e372d92"), + "Univ.-Prof. Dr. rer. hort. Klaus-Dieter Brokkoli" + ) + public static final Transformer2WTypeInput transformerTypeBtoD = new Transformer2WTypeInput( UUID.fromString("202069a7-bcf8-422c-837c-273575220c8a"), "HS-MS_1", @@ -64,7 +73,7 @@ class GridTestData { 10 ) private static final Transformer2WTypeInput transformerTypeBtoE = new Transformer2WTypeInput( - UUID.randomUUID(), + UUID.fromString("ac30443b-29e7-4635-b399-1062cfb3ffda"), "transformer_type_gedfi89fc7c895076ff25ec6d3b2e7ab9a1b24b37f73ecf30f895005d766a8d8d2774aa", Quantities.getQuantity(0d, OHM), Quantities.getQuantity(51.72750115394592, OHM), @@ -80,8 +89,9 @@ class GridTestData { 1, 19 ) + private static final Transformer2WTypeInput transformerTypeCtoE = new Transformer2WTypeInput( - UUID.randomUUID(), + UUID.fromString("8441dd78-c528-4e63-830d-52d341131432"), "no_shunt_elements_mv-mv", Quantities.getQuantity(1.5, OHM), Quantities.getQuantity(15.5, OHM), @@ -97,6 +107,7 @@ class GridTestData { -5, 5 ) + private static final Transformer2WTypeInput transformerTypeCtoX = new Transformer2WTypeInput( UUID.fromString("08559390-d7c0-4427-a2dc-97ba312ae0ac"), "MS-NS_1", @@ -140,32 +151,34 @@ class GridTestData { ) public static final NodeInput nodeA = new NodeInput( - UUID.fromString("5dc88077-aeb6-4711-9142-db57292640b1"), "node_a", new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator"), - OperationTime.builder().withStart(TimeTools.toZonedDateTime("2020-03-24 15:11:31")).withEnd(TimeTools.toZonedDateTime("2020-03-25 15:11:31")).build() - , + UUID.fromString("4ca90220-74c2-4369-9afa-a18bf068840d"), + "node_a", + profBroccoli, + defaultOperationTime, Quantities.getQuantity(1d, PU), true, geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [7.411111, 51.492528] }") as Point, GermanVoltageLevelUtils.EHV_380KV, 1) + public static final NodeInput nodeB = new NodeInput( UUID.fromString("47d29df0-ba2d-4d23-8e75-c82229c5c758"), "node_b", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + OperationTime.notLimited(), Quantities.getQuantity(1d, PU), false, - null, + NodeInput.DEFAULT_GEO_POSITION, GermanVoltageLevelUtils.HV, 2) + public static final NodeInput nodeC = new NodeInput( UUID.fromString("bd837a25-58f3-44ac-aa90-c6b6e3cd91b2"), "node_c", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + OperationTime.notLimited(), Quantities.getQuantity(1d, PU), false, - null, + NodeInput.DEFAULT_GEO_POSITION, GermanVoltageLevelUtils.MV_20KV, 3) + public static final NodeGraphicInput nodeGraphicC = new NodeGraphicInput( UUID.fromString("09aec636-791b-45aa-b981-b14edf171c4c"), "main", @@ -173,13 +186,13 @@ class GridTestData { nodeC, geoJsonReader.read("{ \"type\": \"Point\", \"coordinates\": [0, 10] }") as Point ) + public static final NodeInput nodeD = new NodeInput( UUID.fromString("6e0980e0-10f2-4e18-862b-eb2b7c90509b"), "node_d", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + OperationTime.notLimited(), Quantities.getQuantity(1d, PU), false, - null, + NodeInput.DEFAULT_GEO_POSITION, GermanVoltageLevelUtils.MV_20KV, 4) public static final NodeGraphicInput nodeGraphicD = new NodeGraphicInput( @@ -189,38 +202,45 @@ class GridTestData { nodeD, null ) + public static final NodeInput nodeE = new NodeInput( - UUID.randomUUID(), "node_e", 
OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + UUID.fromString("98a3e7fa-c456-455b-a5ea-bb19e7cbeb63"), + "node_e", + OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited(), Quantities.getQuantity(1d, PU), false, - null, + NodeInput.DEFAULT_GEO_POSITION, GermanVoltageLevelUtils.MV_10KV, 5) + public static final NodeInput nodeF = new NodeInput( - UUID.fromString("aaa74c1a-d07e-4615-99a5-e991f1d81cc4"), "node_f", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + UUID.fromString("9e37ce48-9650-44ec-b888-c2fd182aff01"), + "node_f", + OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited(), Quantities.getQuantity(1d, PU), false, - null, + NodeInput.DEFAULT_GEO_POSITION, GermanVoltageLevelUtils.LV, 6) + public static final NodeInput nodeG = new NodeInput( - UUID.fromString("aaa74c1a-d07e-4615-99a5-e991f1d81cc4"), "node_g", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + UUID.fromString("aaa74c1a-d07e-4615-99a5-e991f1d81cc4"), + "node_g", + OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited(), Quantities.getQuantity(1d, PU), false, - null, + NodeInput.DEFAULT_GEO_POSITION, GermanVoltageLevelUtils.LV, 6) public static final Transformer2WInput transformerBtoD = new Transformer2WInput( - UUID.randomUUID(), "2w_single_test", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + UUID.fromString("58247de7-e297-4d9b-a5e4-b662c058c655"), + "2w_single_test", + OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited(), nodeB, nodeD, 1, @@ -228,10 +248,12 @@ class GridTestData { 0, true ) + public static final Transformer2WInput transformerBtoE = new Transformer2WInput( - UUID.randomUUID(), "2w_v_1", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + UUID.fromString("8542bfa5-dc34-4367-b549-e9f515e6cced"), + "2w_v_1", + OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited(), nodeB, nodeE, 1, @@ -239,10 +261,12 @@ class GridTestData { 0, true ) + public static final Transformer2WInput transformerCtoE = new Transformer2WInput( - UUID.randomUUID(), "2w_v_2", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + UUID.fromString("0c03391d-47e1-49b3-9c9c-1616258e78a7"), + "2w_v_2", + OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited(), nodeC, nodeE, 1, @@ -250,10 +274,11 @@ class GridTestData { 0, true ) + public static final Transformer2WInput transformerCtoF = new Transformer2WInput( - UUID.randomUUID(), "2w_parallel_1", OperatorInput.NO_OPERATOR_ASSIGNED, - OperationTime.notLimited() - , + UUID.fromString("26a3583e-8e62-40b7-ba4c-092f6fd5a70d"), + "2w_parallel_1", OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited(), nodeC, nodeF, 1, @@ -261,10 +286,11 @@ class GridTestData { 0, true ) + public static final Transformer2WInput transformerCtoG = new Transformer2WInput( - UUID.fromString("5dc88077-aeb6-4711-9142-db57292640b1"), "2w_parallel_2", new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator"), - OperationTime.builder().withStart(TimeTools.toZonedDateTime("2020-03-24 15:11:31")).withEnd(TimeTools.toZonedDateTime("2020-03-25 15:11:31")).build() - , + UUID.fromString("5dc88077-aeb6-4711-9142-db57292640b1"), "2w_parallel_2", + profBroccoli, + defaultOperationTime, nodeC, nodeG, 1, @@ -274,9 +300,10 @@ class GridTestData { ) public static Transformer3WInput transformerAtoBtoC = new Transformer3WInput( - UUID.fromString("5dc88077-aeb6-4711-9142-db57292640b1"), "3w_test", new 
OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator"), - OperationTime.builder().withStart(TimeTools.toZonedDateTime("2020-03-24 15:11:31")).withEnd(TimeTools.toZonedDateTime("2020-03-25 15:11:31")).build() - , + UUID.fromString("cc327469-7d56-472b-a0df-edbb64f90e8f"), + "3w_test", + profBroccoli, + defaultOperationTime, nodeA, nodeB, nodeC, @@ -288,9 +315,10 @@ class GridTestData { public static final SwitchInput switchAtoB = new SwitchInput( - UUID.fromString("5dc88077-aeb6-4711-9142-db57287640b1"), "test_switch_AtoB", new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator"), - OperationTime.builder().withStart(TimeTools.toZonedDateTime("2020-03-24 15:11:31")).withEnd(TimeTools.toZonedDateTime("2020-03-25 15:11:31")).build() - , + UUID.fromString("5dc88077-aeb6-4711-9142-db57287640b1"), + "test_switch_AtoB", + profBroccoli, + defaultOperationTime, nodeA, nodeB, true @@ -309,9 +337,12 @@ class GridTestData { ) public static final LineInput lineCtoD = new LineInput( - UUID.fromString("91ec3bcf-1777-4d38-af67-0bf7c9fa73c7"), "test_line_AtoB", new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator"), - OperationTime.builder().withStart(TimeTools.toZonedDateTime("2020-03-24 15:11:31")).withEnd(TimeTools.toZonedDateTime("2020-03-25 15:11:31")).build(), - nodeC, nodeD, + UUID.fromString("91ec3bcf-1777-4d38-af67-0bf7c9fa73c7"), + "test_line_CtoD", + profBroccoli, + defaultOperationTime, + nodeC, + nodeD, 2, lineTypeInputCtoD, Quantities.getQuantity(3, Units.METRE), @@ -325,10 +356,25 @@ class GridTestData { lineCtoD ) + public static final LineInput lineAtoB = new LineInput( + UUID.fromString("92ec3bcf-1777-4d38-af67-0bf7c9fa73c7"), + "test_line_AtoB", + profBroccoli, + defaultOperationTime, + nodeA, + nodeB, + 2, + lineTypeInputCtoD, + Quantities.getQuantity(3, Units.METRE), + geoJsonReader.read("{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.492528], [7.414116, 51.484136]]}") as LineString, + OlmCharacteristicInput.CONSTANT_CHARACTERISTIC + ) + public static final MeasurementUnitInput measurementUnitInput = new MeasurementUnitInput( - UUID.fromString("ce6119e3-f725-4166-b6e0-59f62e0c293d"), "test_measurementUnit", new OperatorInput(UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator"), - OperationTime.builder().withStart(TimeTools.toZonedDateTime("2020-03-24 15:11:31")).withEnd(TimeTools.toZonedDateTime("2020-03-25 15:11:31")).build() - , + UUID.fromString("ce6119e3-f725-4166-b6e0-59f62e0c293d"), + "test_measurementUnit", + profBroccoli, + defaultOperationTime, nodeG, true, true, diff --git a/src/test/groovy/edu/ie3/test/common/SystemParticipantTestData.groovy b/src/test/groovy/edu/ie3/test/common/SystemParticipantTestData.groovy index 15539d2d1..d6cdc4127 100644 --- a/src/test/groovy/edu/ie3/test/common/SystemParticipantTestData.groovy +++ b/src/test/groovy/edu/ie3/test/common/SystemParticipantTestData.groovy @@ -38,6 +38,7 @@ import edu.ie3.util.quantities.interfaces.DimensionlessRate import edu.ie3.util.quantities.interfaces.EnergyPrice import edu.ie3.util.quantities.interfaces.SpecificEnergy import edu.ie3.util.quantities.interfaces.SpecificHeatCapacity +import tec.uom.se.ComparableQuantity import tec.uom.se.quantity.Quantities import javax.measure.Quantity @@ -50,6 +51,7 @@ import javax.measure.quantity.Power import javax.measure.quantity.Temperature import javax.measure.quantity.Time import javax.measure.quantity.Volume +import java.time.ZoneId import 
static edu.ie3.util.quantities.PowerSystemUnits.* @@ -57,12 +59,11 @@ import static edu.ie3.util.quantities.PowerSystemUnits.* class SystemParticipantTestData { // general participant data - private static final UUID participantUuid = UUID.fromString("717af017-cc69-406f-b452-e022d7fb516a") private static final OperationTime operationTime = OperationTime.builder() .withStart(TimeUtil.withDefaults.toZonedDateTime("2020-03-24 15:11:31")) .withEnd(TimeUtil.withDefaults.toZonedDateTime("2020-03-25 15:11:31")).build() private static final OperatorInput operator = new OperatorInput( - UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "SystemParticipantOperator") + UUID.fromString("8f9682df-0744-4b58-a122-f0dc730f6510"), "TestOperator") private static final NodeInput participantNode = GridTestData.nodeA // general type data @@ -72,105 +73,266 @@ class SystemParticipantTestData { public static final String cosPhiFixedDeSerialized = "cosPhiFixed:{(0.00,0.95)}" public static final String cosPhiPDeSerialized = "cosPhiP:{(0.00,1.00),(0.90,1.00),(1.20,-0.30)}" public static final String qVDeSerialized = "qV:{(0.90,-0.30),(0.95,0.00),(1.05,0.00),(1.10,0.30)}" - private static final Quantity sRated = Quantities.getQuantity(25, KILOVOLTAMPERE) + private static final ComparableQuantity sRated = Quantities.getQuantity(25d, KILOVOLTAMPERE) private static final double cosPhiRated = 0.95 private static final UUID typeUuid = UUID.fromString("5ebd8f7e-dedb-4017-bb86-6373c4b68eb8") - private static final Quantity capex = Quantities.getQuantity(100, EURO) - private static final Quantity opex = Quantities.getQuantity(50, EURO_PER_MEGAWATTHOUR) - private static final Quantity etaConv = Quantities.getQuantity(98, PERCENT) + private static final ComparableQuantity capex = Quantities.getQuantity(100d, EURO) + private static final ComparableQuantity opex = Quantities.getQuantity(50d, EURO_PER_MEGAWATTHOUR) + private static final ComparableQuantity etaConv = Quantities.getQuantity(98d, PERCENT) // FixedFeedInput - public static final FixedFeedInInput fixedFeedInInput = new FixedFeedInInput(participantUuid, "test_fixedFeedInInput", operator, - operationTime, participantNode, cosPhiFixed, - sRated, cosPhiRated) + public static final FixedFeedInInput fixedFeedInInput = new FixedFeedInInput( + UUID.fromString("717af017-cc69-406f-b452-e022d7fb516a"), + "test_fixedFeedInInput", + operator, + operationTime, + participantNode, + cosPhiFixed, + sRated, + cosPhiRated + ) // PV private static final double albedo = 0.20000000298023224 - private static final Quantity azimuth = Quantities.getQuantity(-8.926613807678223, DEGREE_GEOM) - private static final Quantity height = Quantities.getQuantity(41.01871871948242, DEGREE_GEOM) + private static final ComparableQuantity azimuth = Quantities.getQuantity(-8.926613807678223, DEGREE_GEOM) + private static final ComparableQuantity height = Quantities.getQuantity(41.01871871948242, DEGREE_GEOM) private static double kT = 1 private static double kG = 0.8999999761581421 - public static final PvInput pvInput = new PvInput(participantUuid, "test_pvInput", operator, operationTime, - participantNode, cosPhiFixed, albedo, azimuth, - etaConv, height, kG, kT, false, sRated, cosPhiRated) + public static final PvInput pvInput = new PvInput( + UUID.fromString("d56f15b7-8293-4b98-b5bd-58f6273ce229"), + "test_pvInput", + operator, + operationTime, + participantNode, + cosPhiFixed, + albedo, + azimuth, + etaConv, + height, + kG, + kT, + false, + sRated, + cosPhiRated + ) // WEC private static final 
WecCharacteristicInput wecCharacteristic = new WecCharacteristicInput("cP:{(10.00,0.05),(15.00,0.10),(20.00,0.20)}") - private static final Quantity rotorArea = Quantities.getQuantity(20, SQUARE_METRE) - private static final Quantity hubHeight = Quantities.getQuantity(200, METRE) - public static final WecTypeInput wecType = new WecTypeInput(typeUuid, "test_wecType", capex, opex, - cosPhiRated, wecCharacteristic, etaConv, sRated, rotorArea, hubHeight) + private static final ComparableQuantity rotorArea = Quantities.getQuantity(20, SQUARE_METRE) + private static final ComparableQuantity hubHeight = Quantities.getQuantity(200, METRE) + public static final WecTypeInput wecType = new WecTypeInput( + typeUuid, + "test_wecType", + capex, + opex, + cosPhiRated, + wecCharacteristic, + etaConv, + sRated, + rotorArea, + hubHeight + ) - public static final WecInput wecInput = new WecInput(participantUuid, "test_wecInput", operator, - operationTime, participantNode, cosPhiP, - wecType, false) + public static final WecInput wecInput = new WecInput( + UUID.fromString("ee7e2e37-a5ad-4def-a832-26a317567ca1"), + "test_wecInput", + operator, + operationTime, + participantNode, + cosPhiP, + wecType, + false + ) // CHP - private static final Quantity etaEl = Quantities.getQuantity(19, PERCENT) - private static final Quantity etaThermal = Quantities.getQuantity(76, PERCENT) - private static final Quantity pOwn = Quantities.getQuantity(0, KILOWATT) - private static final Quantity pThermal = Quantities.getQuantity(9, KILOWATT) - public static final ChpTypeInput chpTypeInput = new ChpTypeInput(typeUuid, "test_chpType", capex, opex, - etaEl, etaThermal, sRated, cosPhiRated, pThermal, pOwn) - - private static final ThermalBusInput thermalBus = new ThermalBusInput(participantUuid, "test_thermalBusInput", operator, operationTime - ) - private static final Quantity storageVolumeLvl = Quantities.getQuantity(1.039154027, CUBIC_METRE) - private static final Quantity storageVolumeLvlMin = Quantities.getQuantity(0.3, CUBIC_METRE) - private static final Quantity inletTemp = Quantities.getQuantity(110, CELSIUS) - private static final Quantity returnTemp = Quantities.getQuantity(80, CELSIUS) - private static final Quantity c = Quantities.getQuantity( - 1, KILOWATTHOUR_PER_KELVIN_TIMES_CUBICMETRE) - private static final ThermalStorageInput thermalStorage = new CylindricalStorageInput(participantUuid, - "test_cylindricThermalStorage", thermalBus, storageVolumeLvl, storageVolumeLvlMin, - inletTemp, returnTemp, c) + private static final ComparableQuantity etaEl = Quantities.getQuantity(19, PERCENT) + private static final ComparableQuantity etaThermal = Quantities.getQuantity(76, PERCENT) + private static final ComparableQuantity pOwn = Quantities.getQuantity(0, KILOWATT) + private static final ComparableQuantity pThermal = Quantities.getQuantity(9, KILOWATT) + public static final ChpTypeInput chpTypeInput = new ChpTypeInput( + typeUuid, + "test_chpType", + capex, + opex, + etaEl, + etaThermal, + sRated, + cosPhiRated, + pThermal, + pOwn + ) - public static final ChpInput chpInput = new ChpInput(participantUuid, "test_chpInput", operator, operationTime, - participantNode, thermalBus, cosPhiFixed, chpTypeInput, thermalStorage, false) + public static final ThermalBusInput thermalBus = new ThermalBusInput( + UUID.fromString("0d95d7f2-49fb-4d49-8636-383a5220384e"), + "test_thermalBusInput", + operator, + operationTime + ) + public static final ComparableQuantity storageVolumeLvl = Quantities.getQuantity(1.039154027, CUBIC_METRE) + 
public static final ComparableQuantity storageVolumeLvlMin = Quantities.getQuantity(0.3, CUBIC_METRE) + public static final ComparableQuantity inletTemp = Quantities.getQuantity(110, CELSIUS) + public static final ComparableQuantity returnTemp = Quantities.getQuantity(80, CELSIUS) + public static final ComparableQuantity c = Quantities.getQuantity( + 1, KILOWATTHOUR_PER_KELVIN_TIMES_CUBICMETRE) + public static final ThermalStorageInput thermalStorage = new CylindricalStorageInput( + UUID.fromString("8851813b-3a7d-4fee-874b-4df9d724e4b3"), + "test_cylindricThermalStorage", + thermalBus, + storageVolumeLvl, + storageVolumeLvlMin, + inletTemp, + returnTemp, + c + ) + public static final ChpInput chpInput = new ChpInput( + UUID.fromString("9981b4d7-5a8e-4909-9602-e2e7ef4fca5c"), + "test_chpInput", + operator, + operationTime, + participantNode, + thermalBus, + cosPhiFixed, + chpTypeInput, + thermalStorage, + false + ) // BM - private static final Quantity loadGradient = Quantities.getQuantity(25, PERCENT_PER_HOUR) - public static final BmTypeInput bmTypeInput = new BmTypeInput(typeUuid, "test_bmTypeInput", capex, opex, - loadGradient, sRated, cosPhiRated, etaConv) + private static final ComparableQuantity loadGradient = Quantities.getQuantity(25, PERCENT_PER_HOUR) + public static final BmTypeInput bmTypeInput = new BmTypeInput( + typeUuid, + "test_bmTypeInput", + capex, + opex, + loadGradient, + sRated, + cosPhiRated, + etaConv + ) - private static final Quantity feedInTarif = Quantities.getQuantity(10, EURO_PER_MEGAWATTHOUR) - public static final BmInput bmInput = new BmInput(participantUuid, "test_bmInput", operator, operationTime, - participantNode, qV, bmTypeInput, false, false, feedInTarif) + private static final ComparableQuantity feedInTarif = Quantities.getQuantity(10, EURO_PER_MEGAWATTHOUR) + public static final BmInput bmInput = new BmInput( + UUID.fromString("d06e5bb7-a3c7-4749-bdd1-4581ff2f6f4d"), + "test_bmInput", + operator, + operationTime, + participantNode, + qV, + bmTypeInput, + false, + false, + feedInTarif + ) // EV - private static final Quantity eStorage = Quantities.getQuantity(100, KILOWATTHOUR) - private static final Quantity eCons = Quantities.getQuantity(5, KILOWATTHOUR_PER_KILOMETRE) - public static final EvTypeInput evTypeInput = new EvTypeInput(typeUuid, "test_evTypeInput", capex, opex, - eStorage, eCons, sRated, cosPhiRated) - public static final EvInput evInput = new EvInput(participantUuid, "test_evInput", operator, operationTime, - participantNode, cosPhiFixed, evTypeInput) + private static final ComparableQuantity eStorage = Quantities.getQuantity(100, KILOWATTHOUR) + private static final ComparableQuantity eCons = Quantities.getQuantity(5, KILOWATTHOUR_PER_KILOMETRE) + public static final EvTypeInput evTypeInput = new EvTypeInput( + typeUuid, + "test_evTypeInput", + capex, + opex, + eStorage, + eCons, + sRated, + cosPhiRated) + public static final EvInput evInput = new EvInput( + UUID.fromString("a17be20f-c7a7-471d-8ffe-015487c9d022"), + "test_evInput", + operator, + operationTime, + participantNode, + cosPhiFixed, + evTypeInput + ) // Load - private static final Quantity eConsAnnual = Quantities.getQuantity(4000, KILOWATTHOUR) + private static final ComparableQuantity eConsAnnual = Quantities.getQuantity(4000, KILOWATTHOUR) private static final StandardLoadProfile standardLoadProfile = BdewLoadProfile.H0 - public static final LoadInput loadInput = new LoadInput(participantUuid, "test_loadInput", operator, operationTime, - participantNode, cosPhiFixed, 
standardLoadProfile, false, eConsAnnual, sRated, cosPhiRated) + public static final LoadInput loadInput = new LoadInput( + UUID.fromString("eaf77f7e-9001-479f-94ca-7fb657766f5f"), + "test_loadInput", + operator, + operationTime, + participantNode, + cosPhiFixed, + standardLoadProfile, + false, + eConsAnnual, + sRated, + cosPhiRated + ) // Storage - private static final Quantity pMax = Quantities.getQuantity(15, KILOWATT) - private static final Quantity eta = Quantities.getQuantity(95, PERCENT) - private static final Quantity dod = Quantities.getQuantity(10, PERCENT) - private static final Quantity cpRate = Quantities.getQuantity(1, PU_PER_HOUR) - private static final Quantity