diff --git a/CHANGELOG.md b/CHANGELOG.md index a8802ba6c..b23c80bcf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - BREAKING: Harmonized field naming for time information - BREAKING: Properly applying snake case to result file names - deprecated `TarballUtils` +- Reworking the time series source (one source per time series, distinct mapping source, factory pattern) ### Fixed - InfluxDbConnector now keeps session instead of creating a new one each call diff --git a/docs/uml/main/DataSourceClassDiagram.puml b/docs/uml/main/DataSourceClassDiagram.puml index 8bcea506c..f47c0696b 100644 --- a/docs/uml/main/DataSourceClassDiagram.puml +++ b/docs/uml/main/DataSourceClassDiagram.puml @@ -88,11 +88,35 @@ interface WeatherSource { } DataSource <|-- WeatherSource -interface WholeSalePriceSource { - {abstract} IndividualTimeSeries getWholesalePrice(ClosedInterval) - {abstract} IndividualTimeSeries getWholesalePrice(ZonedDateTime) +interface TimeSeriesMappingSource { + {abstract} Map getMapping() + Optional getTimeSeriesUuid(UUID) + {abstract} Optional getTimeSeriesMetaInformation(UUID) } -DataSource <|-- WholeSalePriceSource +DataSource <|-- TimeSeriesMappingSource + +class CsvTimeSeriesMappingSource { + - TimeSeriesMappingFactory mappingFactory + - Map mapping +} +CsvTimeSeriesMappingSource <|.. 
TimeSeriesMappingSource +CsvTimeSeriesMappingSource <|-- CsvDataSource + +interface TimeSeriesSource { + {abstract} IndividualTimeSeries getTimeSeries() + {abstract} IndividualTimeSeries getTimeSeries(ClosedInterval)) + {abstract} Optional getValue(ZonedDateTime) +} +TimeSeriesSource <|-- DataSource + +class CsvTimeSeriesSource { + - IndividualTimeSeries timeSeries + + {static} CsvTimeSeriesSource getSource(\n\tString,\n\tString,\n\tFileNamingStrategy,\n\tsvFileConnector.CsvIndividualTimeSeriesMetaInformation) + - IndividualTimeSeries buildIndividualTimeSeries(\n\tUUID,\n\tfilePath,\n\tFunction,\n\tOptional>>) + - Optional> buildTimeBasedValue(\n\tMap,\n\tClass,\n\tTimeBasedSimpleValueFactory) +} +CsvTimeSeriesSource <|.. TimeSeriesSource +CsvTimeSeriesSource <|-- CsvDataSource interface DataConnector { {abstract} shutdown() diff --git a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java index b5789866f..b84583b45 100644 --- a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java +++ b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java @@ -38,6 +38,8 @@ public class CsvFileConnector implements DataConnector { private final Map, BufferedCsvWriter> entityWriters = new HashMap<>(); private final Map timeSeriesWriters = new HashMap<>(); + // ATTENTION: Do not finalize. It's meant for lazy evaluation. + private Map individualTimeSeriesMetaInformation; private final FileNamingStrategy fileNamingStrategy; private final String baseDirectoryName; @@ -175,6 +177,45 @@ public BufferedReader initReader(String filePath) throws FileNotFoundException { new InputStreamReader(new FileInputStream(fullPath), StandardCharsets.UTF_8), 16384); } + /** + * Get time series meta information for a given uuid. + * + *

This method lazily evaluates the mapping from all time series files to their meta + * information. + * + * @param timeSeriesUuid The time series in question + * @return An option on the queried information + */ + public Optional getIndividualTimeSeriesMetaInformation( + UUID timeSeriesUuid) { + if (Objects.isNull(individualTimeSeriesMetaInformation)) + individualTimeSeriesMetaInformation = buildIndividualTimeSeriesMetaInformation(); + + return Optional.ofNullable(individualTimeSeriesMetaInformation.get(timeSeriesUuid)); + } + + /** + * This method creates a map from time series uuid to its meta information. + * + * @return Mapping from time series uuid to its meta information. + */ + private Map + buildIndividualTimeSeriesMetaInformation() { + return getIndividualTimeSeriesFilePaths() + .parallelStream() + .map( + filePath -> { + /* Extract meta information from file path and enhance it with the file path itself */ + String filePathWithoutEnding = removeFileEnding(filePath); + IndividualTimeSeriesMetaInformation metaInformation = + (IndividualTimeSeriesMetaInformation) + fileNamingStrategy.extractTimeSeriesMetaInformation(filePathWithoutEnding); + return new CsvIndividualTimeSeriesMetaInformation( + metaInformation, filePathWithoutEnding); + }) + .collect(Collectors.toMap(FileNameMetaInformation::getUuid, v -> v)); + } + /** * Initialises the readers for time series with the specified column schemes. They are given back * grouped by the column scheme in order to allow for accounting the different content types. @@ -182,7 +223,10 @@ public BufferedReader initReader(String filePath) throws FileNotFoundException { * @param columnSchemes the column schemes to initialize readers for. If no scheme is given, all * possible readers will be initialized. 
* @return A mapping from column type to respective readers + * @deprecated Don't use {@link TimeSeriesReadingData}, as it contains a reader, that might not be + * closed */ + @Deprecated public Map> initTimeSeriesReader( ColumnScheme... columnSchemes) { return getIndividualTimeSeriesFilePaths() @@ -245,7 +289,10 @@ private Set getIndividualTimeSeriesFilePaths() { * @param columnSchemes the allowed column schemes. If no scheme is specified, all schemes are * allowed. * @return An {@link Optional} to {@link TimeSeriesReadingData} + * @deprecated Don't use {@link TimeSeriesReadingData}, as it contains a reader, that might not be + * closed */ + @Deprecated private Optional buildReadingData( String filePathString, ColumnScheme... columnSchemes) { try { @@ -360,7 +407,12 @@ public void shutdown() { }); } - /** Class to bundle all information, that are necessary to read a single time series */ + /** + * Class to bundle all information, that are necessary to read a single time series + * + * @deprecated Use the {@link CsvIndividualTimeSeriesMetaInformation} and build reader on demand + */ + @Deprecated public static class TimeSeriesReadingData { private final UUID uuid; private final ColumnScheme columnScheme; @@ -411,4 +463,52 @@ public String toString() { + '}'; } } + + /** Enhancing the {@link IndividualTimeSeriesMetaInformation} with the full path to csv file */ + public static class CsvIndividualTimeSeriesMetaInformation + extends IndividualTimeSeriesMetaInformation { + private final String fullFilePath; + + public CsvIndividualTimeSeriesMetaInformation( + UUID uuid, ColumnScheme columnScheme, String fullFilePath) { + super(uuid, columnScheme); + this.fullFilePath = fullFilePath; + } + + public CsvIndividualTimeSeriesMetaInformation( + IndividualTimeSeriesMetaInformation metaInformation, String fullFilePath) { + this(metaInformation.getUuid(), metaInformation.getColumnScheme(), fullFilePath); + } + + public String getFullFilePath() { + return fullFilePath; + } + + 
@Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof CsvIndividualTimeSeriesMetaInformation)) return false; + if (!super.equals(o)) return false; + CsvIndividualTimeSeriesMetaInformation that = (CsvIndividualTimeSeriesMetaInformation) o; + return fullFilePath.equals(that.fullFilePath); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), fullFilePath); + } + + @Override + public String toString() { + return "CsvIndividualTimeSeriesMetaInformation{" + + "uuid=" + + getUuid() + + ", columnScheme=" + + getColumnScheme() + + ", fullFilePath='" + + fullFilePath + + '\'' + + '}'; + } + } } diff --git a/src/main/java/edu/ie3/datamodel/io/csv/DefaultDirectoryHierarchy.java b/src/main/java/edu/ie3/datamodel/io/csv/DefaultDirectoryHierarchy.java index 2bf235b41..48d60243d 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/DefaultDirectoryHierarchy.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/DefaultDirectoryHierarchy.java @@ -6,6 +6,7 @@ package edu.ie3.datamodel.io.csv; import edu.ie3.datamodel.exceptions.FileException; +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.*; import edu.ie3.datamodel.models.input.connector.LineInput; @@ -30,7 +31,6 @@ import edu.ie3.datamodel.models.result.system.*; import edu.ie3.datamodel.models.result.thermal.ThermalUnitResult; import edu.ie3.datamodel.models.timeseries.TimeSeries; -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping; import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput; import java.io.File; import java.io.IOException; @@ -299,7 +299,8 @@ private enum SubDirectories { TIME_SERIES( PARTICIPANTS_INPUT.relPath + "time_series" + FILE_SEPARATOR, false, - Stream.of(TimeSeries.class, TimeSeriesMapping.Entry.class).collect(Collectors.toSet())), + Stream.of(TimeSeries.class, TimeSeriesMappingSource.MappingEntry.class) + 
.collect(Collectors.toSet())), THERMAL_INPUT( Constants.INPUT_SUB_TREE + FILE_SEPARATOR + "thermal" + FILE_SEPARATOR, false, diff --git a/src/main/java/edu/ie3/datamodel/io/csv/FileNamingStrategy.java b/src/main/java/edu/ie3/datamodel/io/csv/FileNamingStrategy.java index 563ff6737..585f53061 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/FileNamingStrategy.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/FileNamingStrategy.java @@ -8,6 +8,7 @@ import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme; import edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation; import edu.ie3.datamodel.io.csv.timeseries.LoadProfileTimeSeriesMetaInformation; +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource; import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.AssetInput; import edu.ie3.datamodel.models.input.AssetTypeInput; @@ -19,7 +20,6 @@ import edu.ie3.datamodel.models.timeseries.TimeSeries; import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping; import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput; import edu.ie3.datamodel.models.value.*; import edu.ie3.util.StringUtils; @@ -235,7 +235,8 @@ public Optional getFileName(Class cls) { return getGraphicsInputFileName(cls.asSubclass(GraphicInput.class)); if (OperatorInput.class.isAssignableFrom(cls)) return getOperatorInputFileName(cls.asSubclass(OperatorInput.class)); - if (TimeSeriesMapping.Entry.class.isAssignableFrom(cls)) return getTimeSeriesMappingFileName(); + if (TimeSeriesMappingSource.MappingEntry.class.isAssignableFrom(cls)) + return getTimeSeriesMappingFileName(); logger.error("There is no naming strategy defined for {}", cls.getSimpleName()); return Optional.empty(); } diff --git a/src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesMappingFactory.java 
b/src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesMappingFactory.java index 00ff4dd43..64aa90af4 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesMappingFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesMappingFactory.java @@ -7,7 +7,7 @@ import edu.ie3.datamodel.io.factory.EntityFactory; import edu.ie3.datamodel.io.factory.SimpleEntityData; -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping; +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource; import java.util.Collections; import java.util.List; import java.util.Set; @@ -16,13 +16,13 @@ import java.util.stream.Stream; public class TimeSeriesMappingFactory - extends EntityFactory { + extends EntityFactory { private static final String UUID = "uuid"; private static final String PARTICIPANT = "participant"; private static final String TIME_SERIES = "timeSeries"; public TimeSeriesMappingFactory() { - super(TimeSeriesMapping.Entry.class); + super(TimeSeriesMappingSource.MappingEntry.class); } @Override @@ -32,10 +32,10 @@ protected List> getFields(SimpleEntityData data) { } @Override - protected TimeSeriesMapping.Entry buildModel(SimpleEntityData data) { + protected TimeSeriesMappingSource.MappingEntry buildModel(SimpleEntityData data) { UUID uuid = data.getUUID(UUID); UUID participant = data.getUUID(PARTICIPANT); UUID timeSeries = data.getUUID(TIME_SERIES); - return new TimeSeriesMapping.Entry(uuid, participant, timeSeries); + return new TimeSeriesMappingSource.MappingEntry(uuid, participant, timeSeries); } } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java index 0542e235f..f5e4a7ec3 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java @@ -6,6 +6,7 @@ package 
edu.ie3.datamodel.io.processor.input; import edu.ie3.datamodel.io.processor.EntityProcessor; +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource; import edu.ie3.datamodel.models.input.*; import edu.ie3.datamodel.models.input.connector.*; import edu.ie3.datamodel.models.input.connector.type.LineTypeInput; @@ -18,7 +19,6 @@ import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput; import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; import edu.ie3.datamodel.models.input.thermal.ThermalHouseInput; -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -39,7 +39,7 @@ public class InputEntityProcessor extends EntityProcessor { /* InputEntity */ OperatorInput.class, RandomLoadParameters.class, - TimeSeriesMapping.Entry.class, + TimeSeriesMappingSource.MappingEntry.class, /* - AssetInput */ NodeInput.class, LineInput.class, diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java new file mode 100644 index 000000000..b85e31994 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java @@ -0,0 +1,90 @@ +/* + * © 2021. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source; + +import edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation; +import edu.ie3.datamodel.models.input.InputEntity; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.UUID; + +/** + * This interface describes basic function to handle mapping between models and their respective + * time series + */ +public interface TimeSeriesMappingSource extends DataSource { + /** + * Get a mapping from model {@link UUID} to the time series {@link UUID} + * + * @return That mapping + */ + Map getMapping(); + + /** + * Get a time series identifier to a given model identifier + * + * @param modelIdentifier Identifier of the model + * @return An {@link Optional} to the time series identifier + */ + default Optional getTimeSeriesUuid(UUID modelIdentifier) { + return Optional.ofNullable(getMapping().get(modelIdentifier)); + } + + /** + * Get an option on the given time series meta information + * + * @param timeSeriesUuid Unique identifier of the time series in question + * @return An Option onto the meta information + */ + Optional getTimeSeriesMetaInformation(UUID timeSeriesUuid); + + /** Class to represent one entry within the participant to time series mapping */ + class MappingEntry extends InputEntity { + private final UUID participant; + private final UUID timeSeries; + + public MappingEntry(UUID uuid, UUID participant, UUID timeSeries) { + super(uuid); + this.participant = participant; + this.timeSeries = timeSeries; + } + + public UUID getParticipant() { + return participant; + } + + public UUID getTimeSeries() { + return timeSeries; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof MappingEntry)) return false; + if (!super.equals(o)) return false; + MappingEntry 
that = (MappingEntry) o; + return participant.equals(that.participant) && timeSeries.equals(that.timeSeries); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), participant, timeSeries); + } + + @Override + public String toString() { + return "MappingEntry{" + + "uuid=" + + getUuid() + + ", participant=" + + participant + + ", timeSeries=" + + timeSeries + + '}'; + } + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesSource.java index 766297946..e74284867 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesSource.java @@ -5,22 +5,39 @@ */ package edu.ie3.datamodel.io.source; -import edu.ie3.datamodel.models.timeseries.TimeSeriesContainer; -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping; -import java.util.Set; +import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; +import edu.ie3.datamodel.models.value.Value; +import edu.ie3.util.interval.ClosedInterval; +import java.time.ZonedDateTime; +import java.util.Optional; + +/** + * The interface definition of a source, that is able to provide one specific time series for one + * model + */ +public interface TimeSeriesSource extends DataSource { + + /** + * Obtain the full time series + * + * @return the time series + */ + IndividualTimeSeries getTimeSeries(); -public interface TimeSeriesSource { /** - * Receive a set of time series mapping entries from participant uuid to time series uuid. + * Get the time series for the given time interval. If the interval is bigger than the time series + * itself, only the parts of the time series within the interval are handed back. 
* - * @return A set of time series mapping entries from participant uuid to time series uuid + * @param timeInterval Desired time interval to cover + * @return The parts of interest of the time series */ - Set getMapping(); + IndividualTimeSeries getTimeSeries(ClosedInterval timeInterval); /** - * Acquire all available time series + * Get the time series value for a specific time * - * @return A container with all relevant time series + * @param time The queried time + * @return Option on a value for that time */ - TimeSeriesContainer getTimeSeries(); + Optional getValue(ZonedDateTime time); } diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java new file mode 100644 index 000000000..73dd3461f --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java @@ -0,0 +1,51 @@ +/* + * © 2021. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.csv; + +import edu.ie3.datamodel.io.csv.FileNamingStrategy; +import edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation; +import edu.ie3.datamodel.io.factory.SimpleEntityData; +import edu.ie3.datamodel.io.factory.timeseries.TimeSeriesMappingFactory; +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Collectors; + +public class CsvTimeSeriesMappingSource extends CsvDataSource implements TimeSeriesMappingSource { + /* Available factories */ + private final TimeSeriesMappingFactory mappingFactory = new TimeSeriesMappingFactory(); + + private final Map mapping; + + public CsvTimeSeriesMappingSource( + String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { + super(csvSep, folderPath, 
fileNamingStrategy); + + /* Build the map */ + mapping = + filterEmptyOptionals( + buildStreamWithFieldsToAttributesMap(MappingEntry.class, connector) + .map( + fieldToValues -> { + SimpleEntityData entityData = + new SimpleEntityData(fieldToValues, MappingEntry.class); + return mappingFactory.get(entityData); + })) + .collect(Collectors.toMap(MappingEntry::getParticipant, MappingEntry::getTimeSeries)); + } + + @Override + public Map getMapping() { + return mapping; + } + + @Override + public Optional getTimeSeriesMetaInformation( + UUID timeSeriesUuid) { + return connector.getIndividualTimeSeriesMetaInformation(timeSeriesUuid); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSource.java index 816c58573..56f2499bb 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSource.java @@ -5,172 +5,199 @@ */ package edu.ie3.datamodel.io.source.csv; -import edu.ie3.datamodel.io.connectors.CsvFileConnector.TimeSeriesReadingData; +import edu.ie3.datamodel.exceptions.SourceException; +import edu.ie3.datamodel.io.connectors.CsvFileConnector; import edu.ie3.datamodel.io.csv.FileNamingStrategy; -import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme; -import edu.ie3.datamodel.io.factory.SimpleEntityData; import edu.ie3.datamodel.io.factory.timeseries.*; import edu.ie3.datamodel.io.source.TimeSeriesSource; -import edu.ie3.datamodel.models.timeseries.TimeSeriesContainer; import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue; -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping; import edu.ie3.datamodel.models.value.*; +import edu.ie3.datamodel.utils.TimeSeriesUtil; +import edu.ie3.util.interval.ClosedInterval; +import java.io.BufferedReader; +import 
java.io.FileNotFoundException; +import java.io.IOException; +import java.time.ZonedDateTime; import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; /** Source that is capable of providing information around time series from csv files. */ -public class CsvTimeSeriesSource extends CsvDataSource implements TimeSeriesSource { - - /* Available factories */ - private final TimeSeriesMappingFactory mappingFactory = new TimeSeriesMappingFactory(); - private final TimeBasedSimpleValueFactory energyPriceFactory = - new TimeBasedSimpleValueFactory<>(EnergyPriceValue.class); - private final TimeBasedSimpleValueFactory heatAndSValueFactory = - new TimeBasedSimpleValueFactory<>(HeatAndSValue.class); - private final TimeBasedSimpleValueFactory heatAndPValueFactory = - new TimeBasedSimpleValueFactory<>(HeatAndPValue.class); - private final TimeBasedSimpleValueFactory heatDemandValueFactory = - new TimeBasedSimpleValueFactory<>(HeatDemandValue.class); - private final TimeBasedSimpleValueFactory sValueFactory = - new TimeBasedSimpleValueFactory<>(SValue.class); - private final TimeBasedSimpleValueFactory pValueFactory = - new TimeBasedSimpleValueFactory<>(PValue.class); +public class CsvTimeSeriesSource extends CsvDataSource + implements TimeSeriesSource { + private final IndividualTimeSeries timeSeries; /** - * Initializes a new CsvTimeSeriesSource + * Factory method to build a source from given meta information * * @param csvSep the separator string for csv columns * @param folderPath path to the folder holding the time series files * @param fileNamingStrategy strategy for the naming of time series files + * @param metaInformation The given meta information + * @throws SourceException If the given meta information are not supported + * @return The source */ - public CsvTimeSeriesSource( - String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { - super(csvSep, folderPath, fileNamingStrategy); - } - - /** - * Receive a set of 
time series mapping entries from participant uuid to time series uuid. - * - * @return A set of time series mapping entries from participant uuid to time series uuid - */ - @Override - public Set getMapping() { - return filterEmptyOptionals( - buildStreamWithFieldsToAttributesMap(TimeSeriesMapping.Entry.class, connector) - .map( - fieldToValues -> { - SimpleEntityData entityData = - new SimpleEntityData(fieldToValues, TimeSeriesMapping.Entry.class); - return mappingFactory.get(entityData); - })) - .collect(Collectors.toSet()); - } - - /** - * Acquire all available time series - * - * @return A container with all relevant time series - */ - @Override - public TimeSeriesContainer getTimeSeries() { - /* Get all time series reader */ - Map> colTypeToReadingData = - connector.initTimeSeriesReader(); - - /* Reading in energy price time series */ - Set> energyPriceTimeSeries = - read( - colTypeToReadingData.get(ColumnScheme.ENERGY_PRICE), + public static CsvTimeSeriesSource getSource( + String csvSep, + String folderPath, + FileNamingStrategy fileNamingStrategy, + CsvFileConnector.CsvIndividualTimeSeriesMetaInformation metaInformation) + throws SourceException { + switch (metaInformation.getColumnScheme()) { + case ACTIVE_POWER: + TimeBasedSimpleValueFactory pValueFactory = + new TimeBasedSimpleValueFactory<>(PValue.class); + return new CsvTimeSeriesSource<>( + csvSep, + folderPath, + fileNamingStrategy, + metaInformation.getUuid(), + metaInformation.getFullFilePath(), + PValue.class, + pValueFactory); + case APPARENT_POWER: + TimeBasedSimpleValueFactory sValueFactory = + new TimeBasedSimpleValueFactory<>(SValue.class); + return new CsvTimeSeriesSource<>( + csvSep, + folderPath, + fileNamingStrategy, + metaInformation.getUuid(), + metaInformation.getFullFilePath(), + SValue.class, + sValueFactory); + case ENERGY_PRICE: + TimeBasedSimpleValueFactory energyPriceFactory = + new TimeBasedSimpleValueFactory<>(EnergyPriceValue.class); + return new CsvTimeSeriesSource<>( + 
csvSep, + folderPath, + fileNamingStrategy, + metaInformation.getUuid(), + metaInformation.getFullFilePath(), EnergyPriceValue.class, energyPriceFactory); - - /* Reading in heat and apparent power time series */ - Set> heatAndApparentPowerTimeSeries = - read( - colTypeToReadingData.get(ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND), + case APPARENT_POWER_AND_HEAT_DEMAND: + TimeBasedSimpleValueFactory heatAndSValueFactory = + new TimeBasedSimpleValueFactory<>(HeatAndSValue.class); + return new CsvTimeSeriesSource<>( + csvSep, + folderPath, + fileNamingStrategy, + metaInformation.getUuid(), + metaInformation.getFullFilePath(), HeatAndSValue.class, heatAndSValueFactory); - - /* Reading in heat time series */ - Set> heatTimeSeries = - read( - colTypeToReadingData.get(ColumnScheme.HEAT_DEMAND), - HeatDemandValue.class, - heatDemandValueFactory); - - /* Reading in heat and active power time series */ - Set> heatAndActivePowerTimeSeries = - read( - colTypeToReadingData.get(ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND), + case ACTIVE_POWER_AND_HEAT_DEMAND: + TimeBasedSimpleValueFactory heatAndPValueFactory = + new TimeBasedSimpleValueFactory<>(HeatAndPValue.class); + return new CsvTimeSeriesSource<>( + csvSep, + folderPath, + fileNamingStrategy, + metaInformation.getUuid(), + metaInformation.getFullFilePath(), HeatAndPValue.class, heatAndPValueFactory); - - /* Reading in apparent power time series */ - Set> apparentPowerTimeSeries = - read(colTypeToReadingData.get(ColumnScheme.APPARENT_POWER), SValue.class, sValueFactory); - - /* Reading in active power time series */ - Set> activePowerTimeSeries = - read(colTypeToReadingData.get(ColumnScheme.ACTIVE_POWER), PValue.class, pValueFactory); - - return new TimeSeriesContainer( - energyPriceTimeSeries, - heatAndApparentPowerTimeSeries, - heatAndActivePowerTimeSeries, - heatTimeSeries, - apparentPowerTimeSeries, - activePowerTimeSeries); + case HEAT_DEMAND: + TimeBasedSimpleValueFactory heatDemandValueFactory = + new 
TimeBasedSimpleValueFactory<>(HeatDemandValue.class); + return new CsvTimeSeriesSource<>( + csvSep, + folderPath, + fileNamingStrategy, + metaInformation.getUuid(), + metaInformation.getFullFilePath(), + HeatDemandValue.class, + heatDemandValueFactory); + default: + throw new SourceException( + "Unsupported column scheme '" + metaInformation.getColumnScheme() + "'."); + } } /** - * Reads in time series of a specified class from given {@link TimeSeriesReadingData} utilising a - * provided {@link TimeBasedSimpleValueFactory}, except for weather data, which needs a special - * processing + * Initializes a new CsvTimeSeriesSource * - * @param readingData Data needed for reading - * @param valueClass Class of the target value within the time series - * @param factory Factory to utilize - * @param Type of the value - * @return A set of {@link IndividualTimeSeries} + * @param csvSep the separator string for csv columns + * @param folderPath path to the folder holding the time series files + * @param fileNamingStrategy strategy for the naming of time series files + * @param timeSeriesUuid Unique identifier of the time series + * @param filePath Path of the file, excluding extension and being relative to {@code folderPath} + * @param valueClass Class of the value + * @param factory The factory implementation to use for actual parsing of input data */ - private Set> read( - Set readingData, + public CsvTimeSeriesSource( + String csvSep, + String folderPath, + FileNamingStrategy fileNamingStrategy, + UUID timeSeriesUuid, + String filePath, Class valueClass, TimeBasedSimpleValueFactory factory) { - return readingData - .parallelStream() - .map( - data -> - buildIndividualTimeSeries( - data, - fieldToValue -> this.buildTimeBasedValue(fieldToValue, valueClass, factory))) - .collect(Collectors.toSet()); + super(csvSep, folderPath, fileNamingStrategy); + + /* Read in the full time series */ + try { + this.timeSeries = + buildIndividualTimeSeries( + timeSeriesUuid, + filePath, + 
fieldToValue -> this.buildTimeBasedValue(fieldToValue, valueClass, factory)); + } catch (SourceException e) { + throw new IllegalArgumentException( + "Unable to obtain time series with UUID '" + + timeSeriesUuid + + "'. Please check arguments!", + e); + } + } + + @Override + public IndividualTimeSeries getTimeSeries() { + return timeSeries; + } + + @Override + public IndividualTimeSeries getTimeSeries(ClosedInterval timeInterval) { + return TimeSeriesUtil.trimTimeSeriesToInterval(timeSeries, timeInterval); + } + + @Override + public Optional getValue(ZonedDateTime time) { + return timeSeries.getValue(time); } /** - * Builds an individual time series, by obtaining the single entries (with the help of {@code - * fieldToValueFunction} and putting everything together in the {@link IndividualTimeSeries} - * container. + * Attempts to read a time series with given unique identifier and file path. Single entries are + * obtained entries with the help of {@code fieldToValueFunction}. * - * @param data Needed data to read in the content of the specific, underlying file - * @param fieldToValueFunction Function, that is able to transfer a mapping (from field to value) + * @param timeSeriesUuid unique identifier of the time series + * @param filePath path to the file to read + * @param fieldToValueFunction function, that is able to transfer a mapping (from field to value) * onto a specific instance of the targeted entry class - * @param Type of the {@link Value}, that will be contained in each time series, timely - * located entry - * @return An {@link IndividualTimeSeries} with {@link TimeBasedValue} of type {@code V}. 
+ * @throws SourceException If the file cannot be read properly + * @return An option onto an individual time series */ - private IndividualTimeSeries buildIndividualTimeSeries( - TimeSeriesReadingData data, - Function, Optional>> fieldToValueFunction) { - Set> timeBasedValues = - filterEmptyOptionals( - buildStreamWithFieldsToAttributesMap(TimeBasedValue.class, data.getReader()) - .map(fieldToValueFunction)) - .collect(Collectors.toSet()); - - return new IndividualTimeSeries<>(data.getUuid(), timeBasedValues); + private IndividualTimeSeries buildIndividualTimeSeries( + UUID timeSeriesUuid, + String filePath, + Function, Optional>> fieldToValueFunction) + throws SourceException { + try (BufferedReader reader = connector.initReader(filePath)) { + Set> timeBasedValues = + filterEmptyOptionals( + buildStreamWithFieldsToAttributesMap(TimeBasedValue.class, reader) + .map(fieldToValueFunction)) + .collect(Collectors.toSet()); + + return new IndividualTimeSeries<>(timeSeriesUuid, timeBasedValues); + } catch (FileNotFoundException e) { + throw new SourceException("Unable to find a file with path '" + filePath + "'.", e); + } catch (IOException e) { + throw new SourceException("Error during reading of file '" + filePath + "'.", e); + } } /** @@ -180,10 +207,9 @@ private IndividualTimeSeries buildIndividualTimeSeries( * @param fieldToValues Mapping from field id to values * @param valueClass Class of the desired underlying value * @param factory Factory to process the "flat" information - * @param Type of the underlying value * @return Optional simple time based value */ - private Optional> buildTimeBasedValue( + private Optional> buildTimeBasedValue( Map fieldToValues, Class valueClass, TimeBasedSimpleValueFactory factory) { diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java index b21550346..3be0790a2 100644 --- 
a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java @@ -18,6 +18,7 @@ import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue; import edu.ie3.datamodel.models.value.Value; import edu.ie3.datamodel.models.value.WeatherValue; +import edu.ie3.datamodel.utils.TimeSeriesUtil; import edu.ie3.util.interval.ClosedInterval; import java.io.BufferedReader; import java.io.IOException; @@ -141,24 +142,7 @@ private Map> trimMapToInterval( .collect( Collectors.toMap( Map.Entry::getKey, - entry -> trimTimeSeriesToInterval(entry.getValue(), timeInterval))); - } - - /** - * Trims a time series to the given time interval - * - * @param timeSeries the time series to trim - * @param timeInterval the interval to trim the data to - * @return the trimmed time series - */ - private IndividualTimeSeries trimTimeSeriesToInterval( - IndividualTimeSeries timeSeries, ClosedInterval timeInterval) { - return new IndividualTimeSeries<>( - timeSeries.getUuid(), - timeSeries.getEntries().stream() - .parallel() - .filter(value -> timeInterval.includes(value.getTime())) - .collect(Collectors.toSet())); + entry -> TimeSeriesUtil.trimTimeSeriesToInterval(entry.getValue(), timeInterval))); } /** diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeriesContainer.java b/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeriesContainer.java index efe031ba5..03b273c18 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeriesContainer.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeriesContainer.java @@ -13,6 +13,7 @@ import java.util.stream.Stream; /** Container class to hold different types of individual time series */ +@Deprecated public class TimeSeriesContainer { private final Set> energyPrice; private final Set> heatAndApparentPower; diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/mapping/TimeSeriesMapping.java 
b/src/main/java/edu/ie3/datamodel/models/timeseries/mapping/TimeSeriesMapping.java deleted file mode 100644 index de3317b42..000000000 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/mapping/TimeSeriesMapping.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.models.timeseries.mapping; - -import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.InputEntity; -import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; -import edu.ie3.datamodel.models.value.Value; -import java.util.*; -import java.util.function.Function; -import java.util.stream.Collectors; - -public class TimeSeriesMapping { - private final Map> mapping; - - public TimeSeriesMapping(Map> mapping) { - this.mapping = mapping; - } - - /** - * Builds the mapping from given entries (e.g. from a file) and available time series. If a - * referred time series is not available, an {@link IllegalArgumentException} is thrown. - * - * @param entries Collection of mapping entries - * @param timeSeries Available time series - */ - public TimeSeriesMapping( - Collection entries, Collection> timeSeries) { - /* Map time series from their uuid to themselves */ - Map> timeSeriesMap = - timeSeries.stream().collect(Collectors.toMap(UniqueEntity::getUuid, Function.identity())); - - /* Map from participant UUID to time series */ - mapping = - entries.stream() - .collect( - Collectors.toMap( - entry -> entry.participant, - entry -> { - UUID tsUuid = entry.timeSeries; - if (timeSeriesMap.containsKey(tsUuid)) return timeSeriesMap.get(tsUuid); - else - throw new IllegalArgumentException( - "Cannot find referenced time series with uuid '" + tsUuid + "'."); - })); - } - - /** - * Try to get a matching time series for the given participant uuid. 
- * - * @param participantUuid UUID of the questioned participant - * @return Optional time series, if it is available, empty Optional otherwise - */ - public Optional> get(UUID participantUuid) { - return Optional.ofNullable(mapping.get(participantUuid)); - } - - /** - * Builds the mapping entries from the given mapping - * - * @return A List of {@link Entry}s - */ - public List buildEntries() { - return mapping - .entrySet() - .parallelStream() - .map( - mapEntry -> - new Entry(UUID.randomUUID(), mapEntry.getKey(), mapEntry.getValue().getUuid())) - .collect(Collectors.toList()); - } - - /** - * Model class to denote a single mapping between a participant (represented by it's UUID) and the - * corresponding time series (represented with a UUID as well). - */ - public static class Entry extends InputEntity { - - private final UUID participant; - - private final UUID timeSeries; - - public Entry(UUID uuid, UUID participant, UUID timeSeries) { - super(uuid); - this.participant = participant; - this.timeSeries = timeSeries; - } - - public UUID getParticipant() { - return participant; - } - - public UUID getTimeSeries() { - return timeSeries; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof TimeSeriesMapping.Entry)) return false; - if (!super.equals(o)) return false; - TimeSeriesMapping.Entry that = (TimeSeriesMapping.Entry) o; - return participant.equals(that.participant) && timeSeries.equals(that.timeSeries); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), participant, timeSeries); - } - - @Override - public String toString() { - return "MappingEntry{" - + "uuid=" - + getUuid() - + ", participant=" - + participant - + ", timeSeries=" - + timeSeries - + "} "; - } - } -} diff --git a/src/main/java/edu/ie3/datamodel/utils/TimeSeriesUtil.java b/src/main/java/edu/ie3/datamodel/utils/TimeSeriesUtil.java new file mode 100644 index 000000000..3fa215e38 --- /dev/null +++ 
b/src/main/java/edu/ie3/datamodel/utils/TimeSeriesUtil.java @@ -0,0 +1,37 @@ +/* + * © 2021. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.utils; + +import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; +import edu.ie3.datamodel.models.value.Value; +import edu.ie3.util.interval.ClosedInterval; +import java.time.ZonedDateTime; +import java.util.stream.Collectors; + +public class TimeSeriesUtil { + /** Private Constructor as this class is not meant to be instantiated */ + private TimeSeriesUtil() { + throw new IllegalStateException("Utility classes cannot be instantiated"); + } + + /** + * Trims a time series to the given time interval + * + * @param timeSeries the time series to trim + * @param timeInterval the interval to trim the data to + * @param Type of value carried wit the time series + * @return Trimmed time series + */ + public static IndividualTimeSeries trimTimeSeriesToInterval( + IndividualTimeSeries timeSeries, ClosedInterval timeInterval) { + return new IndividualTimeSeries<>( + timeSeries.getUuid(), + timeSeries.getEntries().stream() + .parallel() + .filter(value -> timeInterval.includes(value.getTime())) + .collect(Collectors.toSet())); + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy index 98e16618d..5d89ddaa0 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy @@ -66,7 +66,7 @@ class CsvFileConnectorTest extends Specification { cfc.shutdown() } - def "The csv file connector is able to provide correct paths time series files"() { + def "The csv file connector is able to provide correct paths to time series files"() { when: def actual = 
cfc.individualTimeSeriesFilePaths @@ -77,6 +77,44 @@ class CsvFileConnectorTest extends Specification { actual.containsAll(timeSeriesPaths) } + def "The csv file connector is able to build correct uuid to meta information mapping"() { + given: + def expected = [ + (UUID.fromString("53990eea-1b5d-47e8-9134-6d8de36604bf")): new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(UUID.fromString("53990eea-1b5d-47e8-9134-6d8de36604bf"), ColumnScheme.APPARENT_POWER, "its_pq_53990eea-1b5d-47e8-9134-6d8de36604bf"), + (UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226")): new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226"), ColumnScheme.ACTIVE_POWER, "its_p_fcf0b851-a836-4bde-8090-f44c382ed226"), + (UUID.fromString("5022a70e-a58f-4bac-b8ec-1c62376c216b")): new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(UUID.fromString("5022a70e-a58f-4bac-b8ec-1c62376c216b"), ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND, "its_pqh_5022a70e-a58f-4bac-b8ec-1c62376c216b"), + (UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1")): new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1"), ColumnScheme.ENERGY_PRICE, "its_c_b88dee50-5484-4136-901d-050d8c1c97d1"), + (UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b")): new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), ColumnScheme.ENERGY_PRICE, "its_c_c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), + (UUID.fromString("085d98ee-09a2-4de4-b119-83949690d7b6")): new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(UUID.fromString("085d98ee-09a2-4de4-b119-83949690d7b6"), ColumnScheme.WEATHER, "its_weather_085d98ee-09a2-4de4-b119-83949690d7b6") + ] + + when: + def actual = cfc.buildIndividualTimeSeriesMetaInformation() + + then: + actual == expected + } + + def "The csv file connector returns empty optional, if there is no meta information 
for queried time series"() { + when: + def actual = cfc.getIndividualTimeSeriesMetaInformation(UUID.fromString("2602e863-3eb6-480e-b752-a3e653af74ec")) + + then: + !actual.present + } + + def "The csv file connector returns correct individual time series meta information"() { + given: + def timeSeriesUuid = UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1") + def expected = Optional.of(new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(timeSeriesUuid, ColumnScheme.ENERGY_PRICE, "its_c_b88dee50-5484-4136-901d-050d8c1c97d1")) + + when: + def actual = cfc.getIndividualTimeSeriesMetaInformation(timeSeriesUuid) + + then: + actual == expected + } + def "The csv file connector returns empty Optional of TimeSeriesReadingData when pointed to non-individual time series"() { given: def pathString = "lpts_h0_53990eea-1b5d-47e8-9134-6d8de36604bf" diff --git a/src/test/groovy/edu/ie3/datamodel/io/csv/FileNamingStrategyTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/csv/FileNamingStrategyTest.groovy index 7ad0715aa..92441daed 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/csv/FileNamingStrategyTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/csv/FileNamingStrategyTest.groovy @@ -8,6 +8,7 @@ package edu.ie3.datamodel.io.csv import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme import edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation import edu.ie3.datamodel.io.csv.timeseries.LoadProfileTimeSeriesMetaInformation +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource import edu.ie3.datamodel.models.BdewLoadProfile import edu.ie3.datamodel.models.UniqueEntity import edu.ie3.datamodel.models.input.MeasurementUnitInput @@ -39,7 +40,6 @@ import edu.ie3.datamodel.models.result.thermal.ThermalHouseResult import edu.ie3.datamodel.models.timeseries.IntValue import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue -import 
edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput import edu.ie3.datamodel.models.timeseries.repetitive.RepetitiveTimeSeries import edu.ie3.datamodel.models.value.EnergyPriceValue @@ -591,7 +591,7 @@ class FileNamingStrategyTest extends Specification { FileNamingStrategy strategy = new FileNamingStrategy() when: - Optional res = strategy.getFileName(TimeSeriesMapping.Entry) + Optional res = strategy.getFileName(TimeSeriesMappingSource.MappingEntry) then: res.present @@ -603,7 +603,7 @@ class FileNamingStrategyTest extends Specification { FileNamingStrategy strategy = new FileNamingStrategy("prefix", "suffix") when: - Optional res = strategy.getFileName(TimeSeriesMapping.Entry) + Optional res = strategy.getFileName(TimeSeriesMappingSource.MappingEntry) then: res.present @@ -621,41 +621,41 @@ class FileNamingStrategyTest extends Specification { actual == expected where: - modelClass || expected - FixedFeedInInput || Optional.empty() - PvInput || Optional.empty() - WecInput || Optional.empty() - ChpInput || Optional.empty() - BmInput || Optional.empty() - EvInput || Optional.empty() - LoadInput || Optional.empty() - StorageInput || Optional.empty() - HpInput || Optional.empty() - LineInput || Optional.empty() - SwitchInput || Optional.empty() - NodeInput || Optional.empty() - MeasurementUnitInput || Optional.empty() - EvcsInput || Optional.empty() - Transformer2WInput || Optional.empty() - Transformer3WInput || Optional.empty() - CylindricalStorageInput || Optional.empty() - ThermalHouseInput || Optional.empty() - BmTypeInput || Optional.empty() - ChpTypeInput || Optional.empty() - EvTypeInput || Optional.empty() - HpTypeInput || Optional.empty() - LineTypeInput || Optional.empty() - StorageTypeInput || Optional.empty() - Transformer2WTypeInput || Optional.empty() - Transformer3WTypeInput || Optional.empty() - WecTypeInput || Optional.empty() - WecTypeInput || Optional.empty() - 
RandomLoadParameters || Optional.empty() - NodeGraphicInput || Optional.empty() - LineGraphicInput || Optional.empty() - WecCharacteristicInput || Optional.empty() - EvCharacteristicInput || Optional.empty() - TimeSeriesMapping.Entry || Optional.empty() + modelClass || expected + FixedFeedInInput || Optional.empty() + PvInput || Optional.empty() + WecInput || Optional.empty() + ChpInput || Optional.empty() + BmInput || Optional.empty() + EvInput || Optional.empty() + LoadInput || Optional.empty() + StorageInput || Optional.empty() + HpInput || Optional.empty() + LineInput || Optional.empty() + SwitchInput || Optional.empty() + NodeInput || Optional.empty() + MeasurementUnitInput || Optional.empty() + EvcsInput || Optional.empty() + Transformer2WInput || Optional.empty() + Transformer3WInput || Optional.empty() + CylindricalStorageInput || Optional.empty() + ThermalHouseInput || Optional.empty() + BmTypeInput || Optional.empty() + ChpTypeInput || Optional.empty() + EvTypeInput || Optional.empty() + HpTypeInput || Optional.empty() + LineTypeInput || Optional.empty() + StorageTypeInput || Optional.empty() + Transformer2WTypeInput || Optional.empty() + Transformer3WTypeInput || Optional.empty() + WecTypeInput || Optional.empty() + WecTypeInput || Optional.empty() + RandomLoadParameters || Optional.empty() + NodeGraphicInput || Optional.empty() + LineGraphicInput || Optional.empty() + WecCharacteristicInput || Optional.empty() + EvCharacteristicInput || Optional.empty() + TimeSeriesMappingSource.MappingEntry || Optional.empty() } def "A simple file naming strategy does return empty sub directory path for any result class"() { diff --git a/src/test/groovy/edu/ie3/datamodel/io/csv/HierarchicFileNamingStrategyTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/csv/HierarchicFileNamingStrategyTest.groovy index 68f829280..07a733553 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/csv/HierarchicFileNamingStrategyTest.groovy +++ 
b/src/test/groovy/edu/ie3/datamodel/io/csv/HierarchicFileNamingStrategyTest.groovy @@ -5,6 +5,7 @@ */ package edu.ie3.datamodel.io.csv +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource import edu.ie3.datamodel.models.BdewLoadProfile import edu.ie3.datamodel.models.UniqueEntity import edu.ie3.datamodel.models.input.MeasurementUnitInput @@ -35,7 +36,6 @@ import edu.ie3.datamodel.models.result.thermal.CylindricalStorageResult import edu.ie3.datamodel.models.result.thermal.ThermalHouseResult import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput import edu.ie3.datamodel.models.timeseries.repetitive.RepetitiveTimeSeries import edu.ie3.datamodel.models.value.EnergyPriceValue @@ -425,7 +425,7 @@ class HierarchicFileNamingStrategyTest extends Specification { def strategy = new HierarchicFileNamingStrategy(defaultHierarchy) when: - def res = strategy.getDirectoryPath(TimeSeriesMapping.Entry) + def res = strategy.getDirectoryPath(TimeSeriesMappingSource.MappingEntry) then: res.present @@ -437,7 +437,7 @@ class HierarchicFileNamingStrategyTest extends Specification { def strategy = new HierarchicFileNamingStrategy(defaultHierarchy) when: - def res = strategy.getFilePath(TimeSeriesMapping.Entry) + def res = strategy.getFilePath(TimeSeriesMappingSource.MappingEntry) then: res.present @@ -449,7 +449,7 @@ class HierarchicFileNamingStrategyTest extends Specification { def strategy = new HierarchicFileNamingStrategy("prefix", "suffix", defaultHierarchy) when: - def res = strategy.getFilePath(TimeSeriesMapping.Entry) + def res = strategy.getFilePath(TimeSeriesMappingSource.MappingEntry) then: res.present diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy index d5f58b14e..57be829de 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/ProcessorProviderTest.groovy @@ -9,6 +9,7 @@ import edu.ie3.datamodel.exceptions.ProcessorProviderException import edu.ie3.datamodel.io.processor.result.ResultEntityProcessor import edu.ie3.datamodel.io.processor.timeseries.TimeSeriesProcessor import edu.ie3.datamodel.io.processor.timeseries.TimeSeriesProcessorKey +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.input.MeasurementUnitInput import edu.ie3.datamodel.models.input.NodeInput @@ -41,7 +42,6 @@ import edu.ie3.datamodel.models.timeseries.TimeSeries import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileEntry import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput import edu.ie3.datamodel.models.value.* @@ -62,7 +62,7 @@ class ProcessorProviderTest extends Specification implements TimeSeriesTestData /* InputEntity */ OperatorInput, RandomLoadParameters, - TimeSeriesMapping.Entry, + TimeSeriesMappingSource.MappingEntry, /* - AssetInput */ NodeInput, LineInput, diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSourceIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSourceIT.groovy new file mode 100644 index 000000000..ac1ad471f --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSourceIT.groovy @@ -0,0 +1,92 @@ +/* + * © 2021. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.source.csv + +import edu.ie3.datamodel.io.connectors.CsvFileConnector +import edu.ie3.datamodel.io.csv.FileNamingStrategy +import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme +import edu.ie3.datamodel.io.source.TimeSeriesMappingSource +import spock.lang.Shared +import spock.lang.Specification + +class CsvTimeSeriesMappingSourceIT extends Specification implements CsvTestDataMeta { + @Shared + TimeSeriesMappingSource source + + def setupSpec() { + source = new CsvTimeSeriesMappingSource(";", timeSeriesFolderPath, new FileNamingStrategy()) + } + + def "The csv time series mapping source is able to provide a valid time series mapping from files"() { + given: + def expectedMapping = [ + (UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409")) : UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5"), + (UUID.fromString("c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8")) : UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26"), + (UUID.fromString("90a96daa-012b-4fea-82dc-24ba7a7ab81c")) : UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") + ] + + when: + def actualMapping = source.mapping + + then: + actualMapping.size() == expectedMapping.size() + + expectedMapping.entrySet().stream().allMatch { entry -> + actualMapping.containsKey(entry.key) && actualMapping.get(entry.key) == entry.value + } + } + + def "The csv time series mapping source returns empty optional on not covered model"() { + given: + def modelUuid = UUID.fromString("60b9a3da-e56c-40ff-ace7-8060cea84baf") + + when: + def actual = source.getTimeSeriesUuid(modelUuid) + + then: + !actual.present + } + + def "The csv time series mapping source is able to return the correct time series uuid"() { + given: + def modelUuid = UUID.fromString("c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8") + def expectedUuid = 
UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") + + when: + def actual = source.getTimeSeriesUuid(modelUuid) + + then: + actual.present + actual.get() == expectedUuid + } + + def "A csv time series mapping source returns empty optional on meta information for non existing time series"() { + given: + def timeSeriesUuid = UUID.fromString("f5eb3be5-98db-40de-85b0-243507636cd5") + + when: + def actual = source.getTimeSeriesMetaInformation(timeSeriesUuid) + + then: + !actual.present + } + + def "A csv time series mapping source returns correct meta information for an existing time series"() { + given: + def timeSeriesUuid = UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") + def expected = new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation( + timeSeriesUuid, + ColumnScheme.APPARENT_POWER, + "its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26") + + when: + def actual = source.getTimeSeriesMetaInformation(timeSeriesUuid) + + then: + actual.present + actual.get() == expected + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceIT.groovy index 9e221b89d..1d74241e9 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceIT.groovy @@ -5,140 +5,86 @@ */ package edu.ie3.datamodel.io.source.csv -import static edu.ie3.datamodel.models.StandardUnits.* -import edu.ie3.datamodel.io.connectors.CsvFileConnector +import edu.ie3.datamodel.exceptions.SourceException import edu.ie3.datamodel.io.csv.FileNamingStrategy -import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme import edu.ie3.datamodel.io.factory.timeseries.TimeBasedSimpleValueFactory -import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries -import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue -import 
edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping -import edu.ie3.datamodel.models.value.* +import edu.ie3.datamodel.models.StandardUnits +import edu.ie3.datamodel.models.value.HeatAndPValue import edu.ie3.util.TimeUtil +import edu.ie3.util.interval.ClosedInterval import spock.lang.Shared import spock.lang.Specification import tech.units.indriya.quantity.Quantities -import java.nio.charset.StandardCharsets - - class CsvTimeSeriesSourceIT extends Specification implements CsvTestDataMeta { @Shared CsvTimeSeriesSource source + @Shared + TimeBasedSimpleValueFactory factory + def setup() { - source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy()) + factory = new TimeBasedSimpleValueFactory<>(HeatAndPValue) + source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7"), "its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7", HeatAndPValue, factory) } - def "The csv time series source is able to provide an individual time series from given field to object function"() { + def "A csv time series source throw an Exception, if the file cannot be found"() { given: - def heatAndSValueFunction = { fieldToValues -> source.buildTimeBasedValue(fieldToValues, HeatAndSValue, new TimeBasedSimpleValueFactory<>(HeatAndSValue)) } - def tsUuid = UUID.fromString("46be1e57-e4ed-4ef7-95f1-b2b321cb2047") - def filePath = new File(this.getClass().getResource( File.separator + "testTimeSeriesFiles" + File.separator + "its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047.csv").toURI()) - def readingData = new CsvFileConnector.TimeSeriesReadingData( - tsUuid, - ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND, - new BufferedReader( - new InputStreamReader(new FileInputStream(filePath), StandardCharsets.UTF_8), 16384) - ) - def expected = new IndividualTimeSeries( - tsUuid, - [ - new TimeBasedValue( - UUID.fromString("661ac594-47f0-4442-8d82-bbeede5661f7"), - 
TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:00:00"), - new HeatAndSValue( - Quantities.getQuantity(1000.0, ACTIVE_POWER_IN), - Quantities.getQuantity(329.0, REACTIVE_POWER_IN), - Quantities.getQuantity(8.0, HEAT_DEMAND) - - )), - new TimeBasedValue( - UUID.fromString("5adcd6c5-a903-433f-b7b5-5fe669a3ed30"), - TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:15:00"), - new HeatAndSValue( - Quantities.getQuantity(1250.0, ACTIVE_POWER_IN), - Quantities.getQuantity(411.0, REACTIVE_POWER_IN), - Quantities.getQuantity(12.0, HEAT_DEMAND) - - ))] as Set - ) + def filePath = "file/not/found.csv" when: - def actual = source.buildIndividualTimeSeries(readingData, heatAndSValueFunction) + source.buildIndividualTimeSeries(UUID.fromString("fbc59b5b-9307-4fb4-a406-c1f08f26fee5"), filePath, { null }) then: - actual.with { - assert uuid == tsUuid - assert entries.size() == expected.entries.size() - assert entries.containsAll(expected.entries) - } - /* Close the reader */ - readingData.reader.close() + def ex = thrown(SourceException) + ex.message == "Unable to find a file with path '" + filePath + "'." 
+ ex.cause.class == FileNotFoundException } - def "The csv time series source is able to acquire all time series of a given type"() { + def "A csv time series source is able to read in a proper file correctly"() { given: - def filePath0 = new File(this.getClass().getResource( File.separator + "testTimeSeriesFiles" + File.separator + "its_pq_1061af70-1c03-46e1-b960-940b956c429f.csv").toURI()) - def tsUuid0 = UUID.fromString("1061af70-1c03-46e1-b960-940b956c429f") - def filePath1 = new File(this.getClass().getResource( File.separator + "testTimeSeriesFiles" + File.separator + "its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26.csv").toURI()) - def tsUuid1 = UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") - def readingData = [ - new CsvFileConnector.TimeSeriesReadingData( - tsUuid0, - ColumnScheme.APPARENT_POWER, - new BufferedReader( - new InputStreamReader(new FileInputStream(filePath0), StandardCharsets.UTF_8), 16384) - ), - new CsvFileConnector.TimeSeriesReadingData( - tsUuid1, - ColumnScheme.APPARENT_POWER, - new BufferedReader( - new InputStreamReader(new FileInputStream(filePath1), StandardCharsets.UTF_8), 16384) - ) - ] as Set - def factory = new TimeBasedSimpleValueFactory<>(SValue) + def filePath = "its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7" + def tsUuid = UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7") when: - def actual = source.read(readingData, SValue, factory) + def actual = source.buildIndividualTimeSeries(tsUuid, filePath, { source.buildTimeBasedValue(it, HeatAndPValue, factory) }) then: - Objects.nonNull(actual) - actual.size() == 2 + noExceptionThrown() + actual.entries.size() == 2 } - def "The csv time series source is able to acquire all time series"() { + def "Construction a csv time series source with malicious parameters, leads to IllegalArgumentException"() { when: - def actual = source.timeSeries + new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("fbc59b5b-9307-4fb4-a406-c1f08f26fee5"), 
"file/not/found", HeatAndPValue, factory) then: - Objects.nonNull(actual) - actual.with { - assert energyPrice.size() == 1 - assert heatAndApparentPower.size() == 1 - assert heatAndActivePower.size() == 1 - assert heat.size() == 1 - assert apparentPower.size() == 2 - assert activePower.size() == 1 - } + def e = thrown(IllegalArgumentException) + e.message == "Unable to obtain time series with UUID 'fbc59b5b-9307-4fb4-a406-c1f08f26fee5'. Please check arguments!" + e.cause.class == SourceException } - def "The csv time series source is able to provide either mapping an time series, that can be put together"() { + def "A csv time series source is able to return a time series for a period of interest"() { + given: + def interval = new ClosedInterval(TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:15:00"), TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:15:00")) + + when: + def actual = source.getTimeSeries(interval) + + then: + actual.entries.size() == 1 + } + + def "A csv time series source is able to return a single value, if it is covered"() { + given: + def time = TimeUtil.withDefaults.toZonedDateTime("2020-01-01 00:15:00") + when: - def mappingEntries = source.mapping - def timeSeries = source.timeSeries - def mapping = new TimeSeriesMapping(mappingEntries, timeSeries.all) + def actual = source.getValue(time) then: - mapping.with { - assert it.mapping.size() == 3 - assert it.mapping.containsKey(UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409")) - assert it.mapping.get(UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409")).uuid == UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5") - assert it.mapping.containsKey(UUID.fromString("c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8")) - assert it.mapping.get(UUID.fromString("c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8")).uuid == UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") - assert it.mapping.containsKey(UUID.fromString("90a96daa-012b-4fea-82dc-24ba7a7ab81c")) - assert 
it.mapping.get(UUID.fromString("90a96daa-012b-4fea-82dc-24ba7a7ab81c")).uuid == UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") - } + actual.present + actual.get() == new HeatAndPValue(Quantities.getQuantity(1250.0, StandardUnits.ACTIVE_POWER_IN), Quantities.getQuantity(12.0, StandardUnits.HEAT_DEMAND)) } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceTest.groovy index 3bc1b33d2..698c3132a 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesSourceTest.groovy @@ -5,14 +5,16 @@ */ package edu.ie3.datamodel.io.source.csv -import static edu.ie3.datamodel.models.StandardUnits.* +import static edu.ie3.datamodel.models.StandardUnits.ENERGY_PRICE +import edu.ie3.datamodel.exceptions.SourceException +import edu.ie3.datamodel.io.connectors.CsvFileConnector import edu.ie3.datamodel.io.csv.FileNamingStrategy +import edu.ie3.datamodel.io.csv.timeseries.ColumnScheme import edu.ie3.datamodel.io.factory.timeseries.TimeBasedSimpleValueFactory import edu.ie3.datamodel.io.source.IdCoordinateSource import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue -import edu.ie3.datamodel.models.timeseries.mapping.TimeSeriesMapping -import edu.ie3.datamodel.models.value.EnergyPriceValue +import edu.ie3.datamodel.models.value.* import edu.ie3.util.TimeUtil import edu.ie3.util.geo.GeoUtils import org.locationtech.jts.geom.Coordinate @@ -22,36 +24,19 @@ import tech.units.indriya.quantity.Quantities import java.time.ZoneId class CsvTimeSeriesSourceTest extends Specification implements CsvTestDataMeta { - def "The csv time series source is able to provide a valid time series mapping from files"() { - given: - def source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy()) - def expectedMapping = [ - new 
TimeSeriesMapping.Entry(UUID.fromString("58167015-d760-4f90-8109-f2ebd94cda91"), UUID.fromString("b86e95b0-e579-4a80-a534-37c7a470a409"), UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5")), - new TimeSeriesMapping.Entry(UUID.fromString("9a9ebfda-dc26-4a40-b9ca-25cd42f6cc3f"), UUID.fromString("c7ebcc6c-55fc-479b-aa6b-6fa82ccac6b8"), UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26")), - new TimeSeriesMapping.Entry(UUID.fromString("9c1c53ea-e575-41a2-a373-a8b2d3ed2c39"), UUID.fromString("90a96daa-012b-4fea-82dc-24ba7a7ab81c"), UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26")) - ] - - when: - def mappingEntries = source.mapping - - then: - mappingEntries.size() == expectedMapping.size() - - expectedMapping.stream().allMatch { mappingEntries.contains(it) } - } def "The csv time series source is able to build time based values from simple data"() { given: def defaultCoordinate = GeoUtils.DEFAULT_GEOMETRY_FACTORY.createPoint(new Coordinate(7.4116482, 51.4843281)) def coordinateSource = Mock(IdCoordinateSource) coordinateSource.getCoordinate(5) >> defaultCoordinate - def source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy()) def factory = new TimeBasedSimpleValueFactory(EnergyPriceValue) + def source = new CsvTimeSeriesSource(";", timeSeriesFolderPath, new FileNamingStrategy(), UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1"), "its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1", EnergyPriceValue, factory) def time = TimeUtil.withDefaults.toZonedDateTime("2019-01-01 00:00:00") def timeUtil = new TimeUtil(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd'T'HH:mm:ss[.S[S][S]]'Z'") def fieldToValue = [ - "uuid": "78ca078a-e6e9-4972-a58d-b2cadbc2df2c", - "time": timeUtil.toString(time), + "uuid" : "78ca078a-e6e9-4972-a58d-b2cadbc2df2c", + "time" : timeUtil.toString(time), "price": "52.4" ] def expected = new TimeBasedValue( @@ -67,4 +52,37 @@ class CsvTimeSeriesSourceTest extends Specification implements CsvTestDataMeta { 
actual.present actual.get() == expected } + + def "The factory method in csv time series source refuses to build time series with unsupported column type"() { + given: + def metaInformation = new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(UUID.fromString("8bc9120d-fb9b-4484-b4e3-0cdadf0feea9"), ColumnScheme.WEATHER, "its_weather_8bc9120d-fb9b-4484-b4e3-0cdadf0feea9") + + when: + CsvTimeSeriesSource.getSource(";", timeSeriesFolderPath, fileNamingStrategy, metaInformation) + + then: + def e = thrown(SourceException) + e.message == "Unsupported column scheme '" + ColumnScheme.WEATHER + "'." + } + + def "The factory method in csv time series source builds a time series source for all supported column types"() { + given: + def metaInformation = new CsvFileConnector.CsvIndividualTimeSeriesMetaInformation(uuid, columnScheme, path) + + when: + def actual = CsvTimeSeriesSource.getSource(";", timeSeriesFolderPath, fileNamingStrategy, metaInformation) + + then: + actual.timeSeries.entries.size() == amountOfEntries + actual.timeSeries.entries[0].value.class == valueClass + + where: + uuid | columnScheme | path || amountOfEntries | valueClass + UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1") | ColumnScheme.ENERGY_PRICE | "its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1" || 2 | EnergyPriceValue + UUID.fromString("c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0") | ColumnScheme.HEAT_DEMAND | "its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0" || 2 | HeatDemandValue + UUID.fromString("9185b8c1-86ba-4a16-8dea-5ac898e8caa5") | ColumnScheme.ACTIVE_POWER | "its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5" || 2 | PValue + UUID.fromString("76c9d846-797c-4f07-b7ec-2245f679f5c7") | ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND | "its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7" || 2 | HeatAndPValue + UUID.fromString("3fbfaa97-cff4-46d4-95ba-a95665e87c26") | ColumnScheme.APPARENT_POWER | "its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26" || 2 | SValue + UUID.fromString("46be1e57-e4ed-4ef7-95f1-b2b321cb2047") 
| ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND | "its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047" || 2 | HeatAndSValue + } } diff --git a/src/test/groovy/edu/ie3/datamodel/models/timeseries/mapping/TimeSeriesMappingTest.groovy b/src/test/groovy/edu/ie3/datamodel/models/timeseries/mapping/TimeSeriesMappingTest.groovy deleted file mode 100644 index fa9121b29..000000000 --- a/src/test/groovy/edu/ie3/datamodel/models/timeseries/mapping/TimeSeriesMappingTest.groovy +++ /dev/null @@ -1,123 +0,0 @@ -/* - * © 2021. TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation - */ -package edu.ie3.datamodel.models.timeseries.mapping - -import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries -import spock.lang.Shared -import spock.lang.Specification - -class TimeSeriesMappingTest extends Specification { - @Shared - IndividualTimeSeries ts0, ts1, ts2, ts3 - - def setupSpec() { - ts0 = Mock(IndividualTimeSeries) - ts0.uuid >> UUID.fromString("b09af80e-d65c-4339-b5af-5504339a3180") - ts1 = Mock(IndividualTimeSeries) - ts1.uuid >> UUID.fromString("ae675233-89ac-4323-b951-df406491f2f0") - ts2 = Mock(IndividualTimeSeries) - ts2.uuid >> UUID.fromString("9c26cc08-3c6e-4ce5-98c5-e8a6925e665c") - ts3 = Mock(IndividualTimeSeries) - ts3.uuid >> UUID.fromString("b8a7e2e7-6f66-4aa5-9c36-6b6bb323b37c") - } - - def "The time series mapping is build correctly"() { - given: - def ts = [ts0, ts1, ts2, ts3] - - def entries = [ - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("502351b5-21f1-489a-8ac0-b85893cbfa47"), ts0.uuid), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("05d22f64-f252-4c4c-b724-dc69a0611ffe"), ts1.uuid), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("43ca59dd-c70c-4184-9638-fd50da53847c"), ts2.uuid), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("e6b6d460-e7ca-40ae-aa70-3f9e04ae52f0"), ts3.uuid) 
- ] - - def expectedMapping = [:] - expectedMapping.put(UUID.fromString("502351b5-21f1-489a-8ac0-b85893cbfa47"), ts0) - expectedMapping.put(UUID.fromString("05d22f64-f252-4c4c-b724-dc69a0611ffe"), ts1) - expectedMapping.put(UUID.fromString("43ca59dd-c70c-4184-9638-fd50da53847c"), ts2) - expectedMapping.put(UUID.fromString("e6b6d460-e7ca-40ae-aa70-3f9e04ae52f0"), ts3) - - when: - def tsm = new TimeSeriesMapping(entries, ts) - - then: - tsm.mapping == expectedMapping - } - - - def "The time series mapping throws an Exception, if a time series is missing"() { - given: - def ts = [ts0, ts1, ts2, ts3] - - def entries = [ - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("502351b5-21f1-489a-8ac0-b85893cbfa47"), ts0.uuid), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("05d22f64-f252-4c4c-b724-dc69a0611ffe"), UUID.fromString("ae675233-89ac-4323-b951-df406491f2f1")), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("43ca59dd-c70c-4184-9638-fd50da53847c"), ts2.uuid), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("e6b6d460-e7ca-40ae-aa70-3f9e04ae52f0"), ts3.uuid) - ] - - when: - new TimeSeriesMapping(entries, ts) - - then: - def ex = thrown(IllegalArgumentException) - ex.message == "Cannot find referenced time series with uuid 'ae675233-89ac-4323-b951-df406491f2f1'." 
- } - - def "The time series mapping returns correct entry on request"() { - given: - def mapping = [:] - mapping.put(UUID.fromString("502351b5-21f1-489a-8ac0-b85893cbfa47"), ts0) - mapping.put(UUID.fromString("05d22f64-f252-4c4c-b724-dc69a0611ffe"), ts1) - mapping.put(UUID.fromString("43ca59dd-c70c-4184-9638-fd50da53847c"), ts2) - mapping.put(UUID.fromString("e6b6d460-e7ca-40ae-aa70-3f9e04ae52f0"), ts3) - def tsm = new TimeSeriesMapping(mapping) - - when: - def actual = tsm.get(participant) - - then: - actual.present - actual.get() == expectedTs - - where: - participant || expectedTs - UUID.fromString("502351b5-21f1-489a-8ac0-b85893cbfa47") || ts0 - UUID.fromString("05d22f64-f252-4c4c-b724-dc69a0611ffe") || ts1 - UUID.fromString("43ca59dd-c70c-4184-9638-fd50da53847c") || ts2 - UUID.fromString("e6b6d460-e7ca-40ae-aa70-3f9e04ae52f0") || ts3 - } - - def "The time series mapping returns correct entries"() { - given: - def mapping = [:] - mapping.put(UUID.fromString("502351b5-21f1-489a-8ac0-b85893cbfa47"), ts0) - mapping.put(UUID.fromString("05d22f64-f252-4c4c-b724-dc69a0611ffe"), ts1) - mapping.put(UUID.fromString("43ca59dd-c70c-4184-9638-fd50da53847c"), ts2) - mapping.put(UUID.fromString("e6b6d460-e7ca-40ae-aa70-3f9e04ae52f0"), ts3) - def tsm = new TimeSeriesMapping(mapping) - - def expectedEntries = [ - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("e6b6d460-e7ca-40ae-aa70-3f9e04ae52f0"), ts3.uuid), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("05d22f64-f252-4c4c-b724-dc69a0611ffe"), ts1.uuid), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("43ca59dd-c70c-4184-9638-fd50da53847c"), ts2.uuid), - new TimeSeriesMapping.Entry(UUID.randomUUID(), UUID.fromString("502351b5-21f1-489a-8ac0-b85893cbfa47"), ts0.uuid) - ] - - when: - def actual = tsm.buildEntries().sort { a, b -> a.participant <=> b.participant } - - then: - [actual, expectedEntries].transpose().forEach { it -> - TimeSeriesMapping.Entry left = (it as 
ArrayList)[0] - TimeSeriesMapping.Entry right = (it as ArrayList)[1] - - assert left.participant == right.participant - assert left.timeSeries == right.timeSeries - } - } -} diff --git a/src/test/groovy/edu/ie3/datamodel/utils/TimeSeriesUtilTest.groovy b/src/test/groovy/edu/ie3/datamodel/utils/TimeSeriesUtilTest.groovy new file mode 100644 index 000000000..4fc9cdb71 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/utils/TimeSeriesUtilTest.groovy @@ -0,0 +1,48 @@ +/* + * © 2021. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.utils + +import edu.ie3.test.common.TimeSeriesTestData +import edu.ie3.util.interval.ClosedInterval +import spock.lang.Specification + +import java.time.ZoneId +import java.time.ZonedDateTime + +class TimeSeriesUtilTest extends Specification implements TimeSeriesTestData { + def "A time series util is able to trim an individual time series to a given interval"() { + given: + def interval = new ClosedInterval(ZonedDateTime.of(1990, 1, 1, 0, 15, 0, 0, ZoneId.of("UTC")), ZonedDateTime.of(1990, 1, 1, 0, 30, 0, 0, ZoneId.of("UTC"))) + + when: + def actual = TimeSeriesUtil.trimTimeSeriesToInterval(individualIntTimeSeries, interval) + + then: + actual.entries.size() == 2 + } + + def "A time series util returns an empty time series, if the interval is not covered"() { + given: + def interval = new ClosedInterval(ZonedDateTime.of(1990, 12, 1, 0, 15, 0, 0, ZoneId.of("UTC")), ZonedDateTime.of(1990, 12, 1, 0, 30, 0, 0, ZoneId.of("UTC"))) + + when: + def actual = TimeSeriesUtil.trimTimeSeriesToInterval(individualIntTimeSeries, interval) + + then: + actual.entries.size() == 0 + } + + def "A time series util returns only those parts of the time series that are covered by the interval"() { + given: + def interval = new ClosedInterval(ZonedDateTime.of(1990, 1, 1, 0, 15, 0, 0, ZoneId.of("UTC")), ZonedDateTime.of(1990, 
1, 1, 1, 45, 0, 0, ZoneId.of("UTC"))) + + when: + def actual = TimeSeriesUtil.trimTimeSeriesToInterval(individualIntTimeSeries, interval) + + then: + actual.entries.size() == 2 + } +}