From 1f10b7cd89505670642ec5b97343c70848b28811 Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Fri, 27 Mar 2020 19:53:10 +0100 Subject: [PATCH 01/13] Introducing processor for TimeBasedValue --- .../io/processor/EntityProcessor.java | 334 +-------------- .../ie3/datamodel/io/processor/Processor.java | 399 ++++++++++++++++++ .../processor/input/InputEntityProcessor.java | 2 +- .../input/TimeBasedValueProcessor.java | 112 +++++ .../result/ResultEntityProcessor.java | 2 +- .../ie3/datamodel/models/UniqueEntity.java | 3 + .../input/TimeBasedValueProcessorTest.groovy | 51 +++ 7 files changed, 577 insertions(+), 326 deletions(-) create mode 100644 src/main/java/edu/ie3/datamodel/io/processor/Processor.java create mode 100644 src/main/java/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessor.java create mode 100644 src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy diff --git a/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java index 6543ad6ab..14830147c 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/EntityProcessor.java @@ -6,27 +6,13 @@ package edu.ie3.datamodel.io.processor; import edu.ie3.datamodel.exceptions.EntityProcessorException; -import edu.ie3.datamodel.io.factory.input.NodeInputFactory; -import edu.ie3.datamodel.io.processor.result.ResultEntityProcessor; -import edu.ie3.datamodel.models.OperationTime; -import edu.ie3.datamodel.models.StandardLoadProfile; -import edu.ie3.datamodel.models.StandardUnits; import edu.ie3.datamodel.models.UniqueEntity; -import edu.ie3.datamodel.models.input.system.StorageStrategy; -import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; import edu.ie3.util.TimeTools; -import java.beans.Introspector; import java.lang.reflect.Method; import java.time.ZoneId; -import java.time.ZonedDateTime; import java.util.*; -import 
java.util.stream.Collectors; -import javax.measure.Quantity; -import org.apache.commons.lang3.ArrayUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.locationtech.jts.geom.Geometry; -import org.locationtech.jts.io.geojson.GeoJsonWriter; /** * Internal API Interface for EntityProcessors. Main purpose is to 'de-serialize' models into a @@ -36,33 +22,11 @@ * @version 0.1 * @since 31.01.20 */ -public abstract class EntityProcessor { +public abstract class EntityProcessor extends Processor { public final Logger log = LogManager.getLogger(this.getClass()); - private final Class registeredClass; protected final String[] headerElements; - private final Map fieldNameToMethod; - - private static final String OPERATION_TIME_FIELD_NAME = OperationTime.class.getSimpleName(); - private static final String OPERATES_FROM = "operatesFrom"; - private static final String OPERATES_UNTIL = "operatesUntil"; - - private static final String VOLT_LVL_FIELD_NAME = "voltLvl"; - private static final String VOLT_LVL = NodeInputFactory.VOLT_LVL; - private static final String V_RATED = NodeInputFactory.V_RATED; - - /* Quantities associated to those fields must be treated differently (e.g. 
input and result), all other quantity / - * field combinations can be treated on a common basis and therefore need no further distinction */ - private static final Set specificQuantityFieldNames = - Collections.unmodifiableSet( - new HashSet<>( - Arrays.asList( - "eConsAnnual", "energy", "eStorage", "q", "p", "pMax", "pOwn", "pThermal"))); - - private static final GeoJsonWriter geoJsonWriter = new GeoJsonWriter(); - - /** Field name of {@link UniqueEntity} uuid */ - private static final String UUID_FIELD_NAME = "uuid"; + private final SortedMap fieldNameToMethod; /** * Create a new EntityProcessor @@ -70,71 +34,13 @@ public abstract class EntityProcessor { * @param registeredClass the class the entity processor should be able to handle */ public EntityProcessor(Class registeredClass) { - this.registeredClass = registeredClass; - this.fieldNameToMethod = registerClass(registeredClass, getAllEligibleClasses()); - this.headerElements = - ArrayUtils - .addAll( // ensures that uuid is always the first entry in the header elements array - new String[] {UUID_FIELD_NAME}, - fieldNameToMethod.keySet().stream() - .filter(x -> !x.toLowerCase().contains(UUID_FIELD_NAME)) - .toArray(String[]::new)); + super(registeredClass); + this.fieldNameToMethod = mapFieldNameToGetter(registeredClass); + this.headerElements = fieldNameToMethod.keySet().toArray(new String[0]); TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss"); } - /** - * Register the class provided in the constructor - * - * @param cls class to be registered - * @return an array of strings of all field values of the registered class - */ - private Map registerClass( - Class cls, List> eligibleClasses) { - - final LinkedHashMap resFieldNameToMethod = new LinkedHashMap<>(); - - if (!eligibleClasses.contains(cls)) - throw new EntityProcessorException( - "Cannot register class '" - + cls.getSimpleName() - + "' with entity processor '" - + this.getClass().getSimpleName() - + "'. 
Eligible classes: " - + eligibleClasses.stream() - .map(Class::getSimpleName) - .collect(Collectors.joining(", "))); - try { - Arrays.stream(Introspector.getBeanInfo(cls, Object.class).getPropertyDescriptors()) - // filter out properties with setters only - .filter(pd -> Objects.nonNull(pd.getReadMethod())) - .forEach( - pd -> { // invoke method to get value - if (pd.getReadMethod() != null) { - - // OperationTime needs to be replaced by operatesFrom and operatesUntil - String fieldName = pd.getName(); - if (fieldName.equalsIgnoreCase(OPERATION_TIME_FIELD_NAME)) { - fieldName = OPERATES_FROM; - resFieldNameToMethod.put(OPERATES_UNTIL, pd.getReadMethod()); - } - - // VoltageLevel needs to be replaced by id and nominalVoltage - if (fieldName.equalsIgnoreCase(VOLT_LVL_FIELD_NAME)) { - fieldName = V_RATED; - resFieldNameToMethod.put(VOLT_LVL, pd.getReadMethod()); - } - resFieldNameToMethod.put(fieldName, pd.getReadMethod()); - } - }); - - } catch (Exception e) { - throw new EntityProcessorException( - "Error during EntityProcessor class registration process. 
Exception was:" + e); - } - return Collections.unmodifiableMap(resFieldNameToMethod); - } - /** * Standard call to handle an entity * @@ -152,237 +58,17 @@ public Optional> handleEntity(T entity) { + ".class or create a new factory for " + entity.getClass().getSimpleName() + ".class!"); - return processEntity(entity); - } - - /** - * Actual implementation of the entity handling process - * - * @param entity the entity that should be 'de-serialized' into a map of fieldName -> fieldValue - * @return an optional Map with fieldName -> fieldValue or an empty optional if an error occurred - * during processing - */ - private Optional> processEntity(T entity) { - - Optional> resultMapOpt; try { - LinkedHashMap resultMap = new LinkedHashMap<>(); - for (String fieldName : headerElements) { - Method method = fieldNameToMethod.get(fieldName); - Optional methodReturnObjectOpt = Optional.ofNullable(method.invoke(entity)); - - if (methodReturnObjectOpt.isPresent()) { - resultMap.put( - fieldName, processMethodResult(methodReturnObjectOpt.get(), method, fieldName)); - } else { - resultMap.put(fieldName, ""); - } - } - resultMapOpt = Optional.of(resultMap); - } catch (Exception e) { - log.error("Error during entity processing:", e); - resultMapOpt = Optional.empty(); + return Optional.of(processObject(entity, fieldNameToMethod)); + } catch (EntityProcessorException e) { + logger.error("Cannot process the entity{}.", entity, e); + return Optional.empty(); } - return resultMapOpt; - } - - /** - * Processes the returned object to String by taking care of different conventions. 
- * - * @param methodReturnObject Return object to process - * @param method The method, that is invoked - * @param fieldName Name of the foreseen field - * @return A String representation of the result - */ - private String processMethodResult(Object methodReturnObject, Method method, String fieldName) { - - StringBuilder resultStringBuilder = new StringBuilder(); - - switch (method.getReturnType().getSimpleName()) { - // primitives (Boolean, Character, Byte, Short, Integer, Long, Float, Double, String, - case "UUID": - case "boolean": - case "int": - case "double": - case "String": - resultStringBuilder.append(methodReturnObject.toString()); - break; - case "Quantity": - resultStringBuilder.append( - handleQuantity((Quantity) methodReturnObject, fieldName) - .orElseThrow( - () -> - new EntityProcessorException( - "Unable to process quantity value for attribute '" - + fieldName - + "' in result entity " - + getRegisteredClass().getSimpleName() - + ".class."))); - break; - case "ZonedDateTime": - resultStringBuilder.append(processZonedDateTime((ZonedDateTime) methodReturnObject)); - break; - case "OperationTime": - resultStringBuilder.append( - processOperationTime((OperationTime) methodReturnObject, fieldName)); - break; - case "VoltageLevel": - resultStringBuilder.append( - processVoltageLevel((VoltageLevel) methodReturnObject, fieldName)); - break; - case "Point": - case "LineString": - resultStringBuilder.append(geoJsonWriter.write((Geometry) methodReturnObject)); - break; - case "StandardLoadProfile": - resultStringBuilder.append(((StandardLoadProfile) methodReturnObject).getKey()); - break; - case "StorageStrategy": - resultStringBuilder.append(((StorageStrategy) methodReturnObject).getToken()); - break; - case "NodeInput": - case "SystemParticipantTypeInput": - case "Transformer3WTypeInput": - case "Transformer2WTypeInput": - case "LineTypeInput": - case "LineInput": - case "OperatorInput": - case "WecTypeInput": - case "ThermalBusInput": - case 
"ThermalStorageInput": - case "ChpTypeInput": - case "BmTypeInput": - case "EvTypeInput": - case "StorageTypeInput": - case "HpTypeInput": - resultStringBuilder.append(((UniqueEntity) methodReturnObject).getUuid()); - break; - case "Optional": // todo needs to be removed asap as this is very dangerous, but necessary as - // long as #75 is not addressed - resultStringBuilder.append(((Optional) methodReturnObject).orElse("")); - break; - default: - throw new EntityProcessorException( - "Unable to process value for attribute/field '" - + fieldName - + "' and method return type '" - + method.getReturnType().getSimpleName() - + "' for method with name '" - + method.getName() - + "' in in entity model " - + getRegisteredClass().getSimpleName() - + ".class."); - } - - return resultStringBuilder.toString(); - } - - /** - * Standard method to process a ZonedDateTime to a String based on a method return object NOTE: - * this method does NOT check if the provided object is of type ZonedDateTime. This has to be done - * manually BEFORE calling this method! 
- * - * @param zonedDateTime representation of the ZonedDateTime - * @return string representation of the ZonedDateTime - */ - protected String processZonedDateTime(ZonedDateTime zonedDateTime) { - return TimeTools.toString(zonedDateTime); - } - - /** - * Handling of elements of type {@link OperationTime} - * - * @param operationTime the operation time that should be processed - * @param fieldName the field name that should be generated (either operatesFrom or operatesUntil) - * @return the resulting string of a OperationTime attribute value for the provided field or an - * empty string when an invalid field name is provided - */ - protected String processOperationTime(OperationTime operationTime, String fieldName) { - StringBuilder resultStringBuilder = new StringBuilder(); - - if (fieldName.equalsIgnoreCase(OPERATES_FROM)) - operationTime - .getStartDate() - .ifPresent(startDate -> resultStringBuilder.append(processZonedDateTime(startDate))); - - if (fieldName.equalsIgnoreCase(OPERATES_UNTIL)) - operationTime - .getEndDate() - .ifPresent(endDate -> resultStringBuilder.append(processZonedDateTime(endDate))); - - return resultStringBuilder.toString(); - } - - /** - * Handling of elements of type {@link VoltageLevel} - * - * @param voltageLevel the voltage level that should be processed - * @param fieldName the field name that should be generated (either v_rated or volt_lvl) - * @return the resulting string of a VoltageLevel attribute value for the provided field or an - * empty string when an invalid field name is provided - */ - protected String processVoltageLevel(VoltageLevel voltageLevel, String fieldName) { - - StringBuilder resultStringBuilder = new StringBuilder(); - if (fieldName.equalsIgnoreCase(VOLT_LVL)) resultStringBuilder.append(voltageLevel.getId()); - - if (fieldName.equalsIgnoreCase(V_RATED)) - resultStringBuilder.append( - handleQuantity(voltageLevel.getNominalVoltage(), fieldName) - .orElseThrow( - () -> - new EntityProcessorException( - "Unable 
to process quantity value for attribute '" - + fieldName - + "' in result entity " - + getRegisteredClass().getSimpleName() - + ".class."))); - return resultStringBuilder.toString(); - } - - /** - * Standard method to process a Quantity to a String based on a method return object - * - * @param quantity the quantity that should be processed - * @param fieldName the field name the quantity is set to - * @return an optional string with the normalized to {@link StandardUnits} value of the quantity - * or empty if an error occurred during processing - */ - protected Optional handleQuantity(Quantity quantity, String fieldName) { - if (specificQuantityFieldNames.contains(fieldName)) { - return handleProcessorSpecificQuantity(quantity, fieldName); - } else { - return quantityValToOptionalString(quantity); - } - } - - /** - * This method should handle all quantities that are model processor specific e.g. we need to - * handle active power p different for {@link edu.ie3.datamodel.models.result.ResultEntity}s and - * {@link edu.ie3.datamodel.models.input.system.SystemParticipantInput}s Hence from the - * generalized method {@link this.handleQuantity()}, this allows for the specific handling of - * child implementations. See the implementation @ {@link ResultEntityProcessor} for details. 
- * - * @param quantity the quantity that should be processed - * @param fieldName the field name the quantity is set to - * @return an optional string with the normalized to {@link StandardUnits} value of the quantity - * or empty if an error occurred during processing - */ - protected abstract Optional handleProcessorSpecificQuantity( - Quantity quantity, String fieldName); - - protected Optional quantityValToOptionalString(Quantity quantity) { - return Optional.of(Double.toString(quantity.getValue().doubleValue())); - } - - public Class getRegisteredClass() { - return registeredClass; } + @Override public String[] getHeaderElements() { return headerElements; } - - protected abstract List> getAllEligibleClasses(); } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java new file mode 100644 index 000000000..bceb646e5 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java @@ -0,0 +1,399 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.processor; + +import edu.ie3.datamodel.exceptions.EntityProcessorException; +import edu.ie3.datamodel.io.factory.input.NodeInputFactory; +import edu.ie3.datamodel.io.processor.result.ResultEntityProcessor; +import edu.ie3.datamodel.models.OperationTime; +import edu.ie3.datamodel.models.StandardLoadProfile; +import edu.ie3.datamodel.models.StandardUnits; +import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.models.input.system.StorageStrategy; +import edu.ie3.datamodel.models.voltagelevels.VoltageLevel; +import edu.ie3.util.TimeTools; +import java.beans.Introspector; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.time.ZonedDateTime; +import java.util.*; +import java.util.stream.Collectors; +import javax.measure.Quantity; +import org.locationtech.jts.geom.Geometry; +import org.locationtech.jts.io.geojson.GeoJsonWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Basic sketch and skeleton for a processors including all functions that apply for all needed + * subtypes of processors + * + * @param Type parameter of the class to handle + */ +public abstract class Processor { + /** + * Comparator to sort a Map of field name to getter method, so that the first entry is the uuid + * and the rest is sorted alphabetically. + */ + private static class UuidFirstComparator implements Comparator { + @Override + public int compare(String a, String b) { + if (a.equalsIgnoreCase(UniqueEntity.UUID_FIELD_NAME)) return -1; + else return a.compareTo(b); + } + } + + protected static final Logger logger = LoggerFactory.getLogger(Processor.class); + protected final Class registeredClass; + + /* Quantities associated to those fields must be treated differently (e.g. 
input and result), all other quantity / + * field combinations can be treated on a common basis and therefore need no further distinction */ + private static final Set specificQuantityFieldNames = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + "eConsAnnual", "energy", "eStorage", "q", "p", "pMax", "pOwn", "pThermal"))); + + private static final GeoJsonWriter geoJsonWriter = new GeoJsonWriter(); + + private static final String OPERATION_TIME_FIELD_NAME = OperationTime.class.getSimpleName(); + private static final String OPERATES_FROM = "operatesFrom"; + private static final String OPERATES_UNTIL = "operatesUntil"; + + private static final String VOLT_LVL_FIELD_NAME = "voltLvl"; + private static final String VOLT_LVL = NodeInputFactory.VOLT_LVL; + private static final String V_RATED = NodeInputFactory.V_RATED; + + /** + * Instantiates a Processor for a foreseen class + * + * @param foreSeenClass Class and its children that are foreseen to be handled with this processor + */ + protected Processor(Class foreSeenClass) { + if (!getEligibleEntityClasses().contains(foreSeenClass)) + throw new EntityProcessorException( + "Cannot register class '" + + foreSeenClass.getSimpleName() + + "' with entity processor '" + + this.getClass().getSimpleName() + + "'. 
Eligible classes: " + + getEligibleEntityClasses().stream() + .map(Class::getSimpleName) + .collect(Collectors.joining(", "))); + + this.registeredClass = foreSeenClass; + } + + /** + * Maps the foreseen table fields to the objects getters + * + * @param cls class to use for mapping + * @return an array of strings of all field values of the class + */ + protected SortedMap mapFieldNameToGetter(Class cls) { + return mapFieldNameToGetter(cls, Collections.emptyList()); + } + + /** + * Maps the foreseen table fields to the objects getters and ignores the specified fields + * + * @param cls class to use for mapping + * @param ignoreFields A collection of all field names to ignore during process + * @return an array of strings of all field values of the class + */ + protected SortedMap mapFieldNameToGetter( + Class cls, Collection ignoreFields) { + try { + final LinkedHashMap resFieldNameToMethod = new LinkedHashMap<>(); + Arrays.stream(Introspector.getBeanInfo(cls, Object.class).getPropertyDescriptors()) + // filter out properties with setters only + .filter(pd -> Objects.nonNull(pd.getReadMethod())) + .filter(pd -> !ignoreFields.contains(pd.getName())) + .forEach( + pd -> { + String fieldName = pd.getName(); + // OperationTime needs to be replaced by operatesFrom and operatesUntil + if (fieldName.equalsIgnoreCase(OPERATION_TIME_FIELD_NAME)) { + fieldName = OPERATES_FROM; + resFieldNameToMethod.put(OPERATES_UNTIL, pd.getReadMethod()); + } + + // VoltageLevel needs to be replaced by id and nominalVoltage + if (fieldName.equalsIgnoreCase(VOLT_LVL_FIELD_NAME)) { + fieldName = V_RATED; + resFieldNameToMethod.put(VOLT_LVL, pd.getReadMethod()); + } + resFieldNameToMethod.put(fieldName, pd.getReadMethod()); + }); + + return putUuidFirst(resFieldNameToMethod); + } catch (Exception e) { + throw new EntityProcessorException( + "Error during EntityProcessor class registration process.", e); + } + } + + /** + * Ensure, that the uuid field is put first. 
All other fields are sorted alphabetically. + * Additionally, the map is immutable + * + * @param unsorted The unsorted map + * @return The sorted map - what a surprise! + */ + private SortedMap putUuidFirst(Map unsorted) { + SortedMap sortedMap = new TreeMap<>(new UuidFirstComparator()); + sortedMap.putAll(unsorted); + return Collections.unmodifiableSortedMap(sortedMap); + } + + /** + * Processes the object to a map from field name to value as String representation + * + * @param object The object to process + * @param fieldNameToGetter Mapping from field name to getter + * @return Mapping from field name to value as String representation + */ + protected LinkedHashMap processObject( + Object object, SortedMap fieldNameToGetter) { + try { + LinkedHashMap resultMap = new LinkedHashMap<>(); + for (Map.Entry entry : fieldNameToGetter.entrySet()) { + String fieldName = entry.getKey(); + Method getter = entry.getValue(); + Optional methodReturnObjectOpt = Optional.ofNullable(getter.invoke(object)); + + if (methodReturnObjectOpt.isPresent()) { + resultMap.put( + fieldName, processMethodResult(methodReturnObjectOpt.get(), getter, fieldName)); + } else { + resultMap.put(fieldName, ""); + } + } + return resultMap; + } catch (IllegalAccessException | InvocationTargetException e) { + throw new EntityProcessorException("Processing of object " + object + "failed.", e); + } + } + + /** + * Processes the returned object to String by taking care of different conventions. 
+ * + * @param methodReturnObject Return object to process + * @param method The method, that is invoked + * @param fieldName Name of the foreseen field + * @return A String representation of the result + */ + protected String processMethodResult(Object methodReturnObject, Method method, String fieldName) { + + StringBuilder resultStringBuilder = new StringBuilder(); + + switch (method.getReturnType().getSimpleName()) { + // primitives (Boolean, Character, Byte, Short, Integer, Long, Float, Double, String, + case "UUID": + case "boolean": + case "int": + case "double": + case "String": + resultStringBuilder.append(methodReturnObject.toString()); + break; + case "Quantity": + resultStringBuilder.append( + handleQuantity((Quantity) methodReturnObject, fieldName) + .orElseThrow( + () -> + new EntityProcessorException( + "Unable to process quantity value for attribute '" + + fieldName + + "' in result entity " + + getRegisteredClass().getSimpleName() + + ".class."))); + break; + case "ZonedDateTime": + resultStringBuilder.append(processZonedDateTime((ZonedDateTime) methodReturnObject)); + break; + case "OperationTime": + resultStringBuilder.append( + processOperationTime((OperationTime) methodReturnObject, fieldName)); + break; + case "VoltageLevel": + resultStringBuilder.append( + processVoltageLevel((VoltageLevel) methodReturnObject, fieldName)); + break; + case "Point": + case "LineString": + resultStringBuilder.append(geoJsonWriter.write((Geometry) methodReturnObject)); + break; + case "StandardLoadProfile": + resultStringBuilder.append(((StandardLoadProfile) methodReturnObject).getKey()); + break; + case "StorageStrategy": + resultStringBuilder.append(((StorageStrategy) methodReturnObject).getToken()); + break; + case "NodeInput": + case "SystemParticipantTypeInput": + case "Transformer3WTypeInput": + case "Transformer2WTypeInput": + case "LineTypeInput": + case "LineInput": + case "OperatorInput": + case "WecTypeInput": + case "ThermalBusInput": + case 
"ThermalStorageInput": + case "ChpTypeInput": + case "BmTypeInput": + case "EvTypeInput": + case "StorageTypeInput": + case "HpTypeInput": + resultStringBuilder.append(((UniqueEntity) methodReturnObject).getUuid()); + break; + case "Optional": // todo needs to be removed asap as this is very dangerous, but necessary as + // long as #75 is not addressed + resultStringBuilder.append(((Optional) methodReturnObject).orElse("")); + break; + default: + throw new EntityProcessorException( + "Unable to process value for attribute/field '" + + fieldName + + "' and method return type '" + + method.getReturnType().getSimpleName() + + "' for method with name '" + + method.getName() + + "' in in entity model " + + getRegisteredClass().getSimpleName() + + ".class."); + } + + return resultStringBuilder.toString(); + } + + /** + * Handling of elements of type {@link VoltageLevel} + * + * @param voltageLevel the voltage level that should be processed + * @param fieldName the field name that should be generated (either v_rated or volt_lvl) + * @return the resulting string of a VoltageLevel attribute value for the provided field or an + * empty string when an invalid field name is provided + */ + protected String processVoltageLevel(VoltageLevel voltageLevel, String fieldName) { + + StringBuilder resultStringBuilder = new StringBuilder(); + if (fieldName.equalsIgnoreCase(VOLT_LVL)) resultStringBuilder.append(voltageLevel.getId()); + + if (fieldName.equalsIgnoreCase(V_RATED)) + resultStringBuilder.append( + handleQuantity(voltageLevel.getNominalVoltage(), fieldName) + .orElseThrow( + () -> + new EntityProcessorException( + "Unable to process quantity value for attribute '" + + fieldName + + "' in result entity " + + getRegisteredClass().getSimpleName() + + ".class."))); + return resultStringBuilder.toString(); + } + + /** + * Standard method to process a Quantity to a String based on a method return object + * + * @param quantity the quantity that should be processed + * @param 
fieldName the field name the quantity is set to + * @return an optional string with the normalized to {@link StandardUnits} value of the quantity + * or empty if an error occurred during processing + */ + protected Optional handleQuantity(Quantity quantity, String fieldName) { + if (specificQuantityFieldNames.contains(fieldName)) { + return handleProcessorSpecificQuantity(quantity, fieldName); + } else { + return quantityValToOptionalString(quantity); + } + } + + /** + * Handling of elements of type {@link OperationTime} + * + * @param operationTime the operation time that should be processed + * @param fieldName the field name that should be generated (either operatesFrom or operatesUntil) + * @return the resulting string of a OperationTime attribute value for the provided field or an + * empty string when an invalid field name is provided + */ + protected String processOperationTime(OperationTime operationTime, String fieldName) { + StringBuilder resultStringBuilder = new StringBuilder(); + + if (fieldName.equalsIgnoreCase(OPERATES_FROM)) + operationTime + .getStartDate() + .ifPresent(startDate -> resultStringBuilder.append(processZonedDateTime(startDate))); + + if (fieldName.equalsIgnoreCase(OPERATES_UNTIL)) + operationTime + .getEndDate() + .ifPresent(endDate -> resultStringBuilder.append(processZonedDateTime(endDate))); + + return resultStringBuilder.toString(); + } + + /** + * Standard method to process a ZonedDateTime to a String based on a method return object NOTE: + * this method does NOT check if the provided object is of type ZonedDateTime. This has to be done + * manually BEFORE calling this method! + * + * @param zonedDateTime representation of the ZonedDateTime + * @return string representation of the ZonedDateTime + */ + protected String processZonedDateTime(ZonedDateTime zonedDateTime) { + return TimeTools.toString(zonedDateTime); + } + + /** + * This method should handle all quantities that are model processor specific e.g. 
we need to + * handle active power p different for {@link edu.ie3.datamodel.models.result.ResultEntity}s and + * {@link edu.ie3.datamodel.models.input.system.SystemParticipantInput}s Hence from the + * generalized method {@link this.handleQuantity()}, this allows for the specific handling of + * child implementations. See the implementation @ {@link ResultEntityProcessor} for details. + * + * @param quantity the quantity that should be processed + * @param fieldName the field name the quantity is set to + * @return an optional string with the normalized to {@link StandardUnits} value of the quantity + * or empty if an error occurred during processing + */ + protected abstract Optional handleProcessorSpecificQuantity( + Quantity quantity, String fieldName); + + /** + * Converts a given quantity to String by extracting the value and applying the toString method to + * it + * + * @param quantity Quantity to convert + * @return A string of the quantity's value + */ + protected Optional quantityValToOptionalString(Quantity quantity) { + return Optional.of(Double.toString(quantity.getValue().doubleValue())); + } + + /** + * Return all header elements of the table + * + * @return all header elements of the table + */ + public abstract String[] getHeaderElements(); + + /** + * Reveal the registered class + * + * @return the registered class + */ + protected Class getRegisteredClass() { + return registeredClass; + } + + /** + * Returns a (unmodifiable) {@link List} of classes that this Processors is capable of processing + * + * @return The unmodifiable {@link List} of eligible classes + */ + protected abstract List> getEligibleEntityClasses(); +} diff --git a/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java index f7bd96ce5..ca3868921 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java +++ 
b/src/main/java/edu/ie3/datamodel/io/processor/input/InputEntityProcessor.java @@ -120,7 +120,7 @@ protected Optional handleProcessorSpecificQuantity( } @Override - protected List> getAllEligibleClasses() { + protected List> getEligibleEntityClasses() { return eligibleEntityClasses; } } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessor.java new file mode 100644 index 000000000..e5bdc12b9 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessor.java @@ -0,0 +1,112 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.processor.input; + +import edu.ie3.datamodel.exceptions.EntityProcessorException; +import edu.ie3.datamodel.io.processor.Processor; +import edu.ie3.datamodel.models.value.EnergyPriceValue; +import edu.ie3.datamodel.models.value.TimeBasedValue; +import edu.ie3.datamodel.models.value.Value; +import edu.ie3.util.TimeTools; +import java.lang.reflect.Method; +import java.time.ZoneId; +import java.util.*; +import java.util.stream.Stream; +import javax.measure.Quantity; + +/** + * Processor for time based values. It "unboxes" the underlying value and joins the field name to + * value mapping of the time based container and the value itself. 
+ * + * @param Type parameter of the contained {@link Value} + */ +public class TimeBasedValueProcessor extends Processor { + + public static final List> eligibleClasses = + Collections.unmodifiableList(Collections.singletonList(EnergyPriceValue.class)); + + private final SortedMap topLevelFieldNameToGetter; + private final SortedMap valueFieldNameToGetter; + private final String[] headerElements; + + /** + * Constructs the processor and registers the foreseen class of the contained {@link Value} + * + * @param foreSeenClass Foreseen class to be contained in this time based value + */ + public TimeBasedValueProcessor(Class foreSeenClass) { + super(foreSeenClass); + + /* Build a mapping from field name to getter method, disjoint for TimeBasedValue and the value itself */ + this.topLevelFieldNameToGetter = + mapFieldNameToGetter(TimeBasedValue.class, Collections.singletonList("value")); + this.valueFieldNameToGetter = mapFieldNameToGetter(foreSeenClass); + + /* Flatten the collected field name */ + this.headerElements = + Stream.of(topLevelFieldNameToGetter.keySet(), valueFieldNameToGetter.keySet()) + .flatMap(Set::stream) + .toArray(String[]::new); + + TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss"); + } + + /** + * Handles a given time based value and returns an option on a mapping from field name on field + * value as String representation + * + * @param timeBasedValue The entity to handle + * @return An option on a mapping from field name to field value as String representation + */ + public Optional> handleEntity(TimeBasedValue timeBasedValue) { + if (!registeredClass.equals(timeBasedValue.getValue().getClass())) + throw new EntityProcessorException( + "Cannot process " + + timeBasedValue.getValue().getClass().getSimpleName() + + ".class with this EntityProcessor. 
Please either provide an element of " + + registeredClass.getSimpleName() + + ".class or create a new factory for " + + timeBasedValue.getClass().getSimpleName() + + ".class!"); + + /* Process both entities disjoint */ + LinkedHashMap topLevelResult; + LinkedHashMap valueResult; + try { + topLevelResult = processObject(timeBasedValue, topLevelFieldNameToGetter); + } catch (EntityProcessorException e) { + logger.error("Cannot process the time based value {} itself.", timeBasedValue, e); + return Optional.empty(); + } + try { + valueResult = processObject(timeBasedValue.getValue(), valueFieldNameToGetter); + } catch (EntityProcessorException e) { + logger.error( + "Cannot process the value {} in a time based value.", timeBasedValue.getValue(), e); + return Optional.empty(); + } + + /* Mix everything together */ + topLevelResult.putAll(valueResult); + return Optional.of(topLevelResult); + } + + @Override + protected Optional handleProcessorSpecificQuantity( + Quantity quantity, String fieldName) { + throw new UnsupportedOperationException("No specific quantity handling needed here!"); + } + + @Override + public String[] getHeaderElements() { + return headerElements; + } + + @Override + protected List> getEligibleEntityClasses() { + return eligibleClasses; + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/processor/result/ResultEntityProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/result/ResultEntityProcessor.java index 15bf00696..8d652ac03 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/result/ResultEntityProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/result/ResultEntityProcessor.java @@ -94,7 +94,7 @@ protected Optional handleProcessorSpecificQuantity( } @Override - protected List> getAllEligibleClasses() { + protected List> getEligibleEntityClasses() { return eligibleEntityClasses; } } diff --git a/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java b/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java index 
b89f0dba7..85c6ef2ce 100644
--- a/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java
+++ b/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java
@@ -10,6 +10,9 @@
 
 /** Supplies every subclass with a generated UUID, making it unique */
 public abstract class UniqueEntity {
+  /** Field name of {@link UniqueEntity} uuid */
+  public static final String UUID_FIELD_NAME = "uuid";
+
   private final UUID uuid;
 
   public UniqueEntity() {
diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy
new file mode 100644
index 000000000..fc02d146a
--- /dev/null
+++ b/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy
@@ -0,0 +1,51 @@
+package edu.ie3.datamodel.io.processor.input
+
+import edu.ie3.util.TimeTools
+import sun.awt.image.ImageWatched
+
+import java.time.ZoneId
+import java.time.ZonedDateTime
+
+import static edu.ie3.util.quantities.PowerSystemUnits.*
+
+import edu.ie3.datamodel.models.value.EnergyPriceValue
+import edu.ie3.datamodel.models.value.TimeBasedValue
+import spock.lang.Specification
+import tec.uom.se.quantity.Quantities
+
+/**
+ * Tests the behaviour of the processor for time based values. The time based value is a unique entity and therefore
+ * has a uuid. In common case, the uuid will not be provided explicitly during model generation. Therefore, here only
+ * the presence of that field is tested and not the specific value.
+ */ +class TimeBasedValueProcessorTest extends Specification { + static { + TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") + } + + def "The TimeBasedValueProcessor should de-serialize a provided time based EnergyPriceValue correctly"() { + given: + TimeBasedValueProcessor processor = new TimeBasedValueProcessor<>(EnergyPriceValue.class) + EnergyPriceValue value = new EnergyPriceValue(Quantities.getQuantity(43.21, EURO_PER_MEGAWATTHOUR)) + ZonedDateTime time = TimeTools.toZonedDateTime("2020-03-27 15:29:14") + TimeBasedValue timeBasedValue = new TimeBasedValue<>(time, value) + Map expected = [ + "uuid" : "has random uuid", + "time" : "2020-03-27 15:29:14", + "price" : "43.21" + ] + + when: + Optional> actual = processor.handleEntity(timeBasedValue) + + then: + actual.isPresent() + LinkedHashMap result = actual.get() + expected.forEach { k, v -> + if(k == "uuid") + assert result.containsKey(k) + else + assert (v == result.get(k)) + } + } +} From 9dc2fe23029599ef07e0d594e6618f365620e2c1 Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Mon, 30 Mar 2020 14:10:51 +0200 Subject: [PATCH 02/13] Altering the layout of uml diagram for time series --- docs/uml/main/InputDatamodelConcept.puml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/docs/uml/main/InputDatamodelConcept.puml b/docs/uml/main/InputDatamodelConcept.puml index e32f7fcbb..dca5863be 100644 --- a/docs/uml/main/InputDatamodelConcept.puml +++ b/docs/uml/main/InputDatamodelConcept.puml @@ -368,39 +368,39 @@ Class PowerValue { - p: Quantity [kW] - q: Quantity [kVA] } -PowerValue --|> Value +Value <|-- PowerValue Class HeatAndPowerValue { - heatDemand: Quantity [kW] } -HeatAndPowerValue --|> PowerValue +PowerValue <|-- HeatAndPowerValue Class HeatDemandValue { - heatDemand: Quantity [kW] } -HeatDemandValue --|> Value +Value <|-- HeatDemandValue Class EnergyPriceValue { - price: Quantity [€/MWh] } -EnergyPriceValue --|> Value +Value <|-- 
EnergyPriceValue Class IrradiationValue { - directIrradiation: Quantity - diffuseIrradiation: Quantity } -IrradiationValue --|> Value +Value <|-- IrradiationValue Class TemperatureValue { - temperature: Quantity } -TemperatureValue --|> Value +Value <|-- TemperatureValue Class WindValue { - direction: Quantity - velocity: Quantity } -WindValue --|> Value +Value <|-- WindValue Class WeatherValue { - coordinate: Point @@ -408,7 +408,7 @@ Class WeatherValue { - temperature: TemperatureValue - wind: WindValue } -WeatherValue --|> Value +Value <|-- WeatherValue WeatherValue --* IrradiationValue WeatherValue --* TemperatureValue WeatherValue --* WindValue @@ -440,7 +440,6 @@ Abstract Class TimeSeries { + T getValue(ZonedDateTime) } TimeSeries --|> UniqueEntity -TimeSeries "1" --* "n" TimeBasedValue Class IndividualTimeSeries { - timeToTimeBasedValue:HashMap> @@ -449,6 +448,7 @@ Class IndividualTimeSeries { + addAll(Map): void } IndividualTimeSeries --|> TimeSeries +IndividualTimeSeries "n" --* "1" TimeBasedValue Class PowerTimeSeries { } @@ -466,6 +466,7 @@ Abstract Class RepetitiveTimeSeries { + calc(ZonedDateTime): T } RepetitiveTimeSeries --|> TimeSeries +RepetitiveTimeSeries "n" --* "1" Value Class LoadProfileInput { + profile: LoadProfileType From 6b6fb60d996a7e7afb7d456f930cf2075b555b0d Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Mon, 30 Mar 2020 18:05:29 +0200 Subject: [PATCH 03/13] Adapting time series to immutability concept --- .../ie3/datamodel/models/UniqueEntity.java | 2 +- .../models/input/LoadProfileInput.java | 27 +++++-- .../models/timeseries/HeatAndPTimeSeries.java | 9 ++- .../models/timeseries/HeatAndSTimeSeries.java | 9 ++- .../timeseries/HeatDemandTimeSeries.java | 10 ++- .../timeseries/IndividualTimeSeries.java | 74 ++++++------------- .../models/timeseries/PTimeSeries.java | 9 ++- .../timeseries/RepetitiveTimeSeries.java | 29 -------- .../models/timeseries/STimeSeries.java | 9 ++- .../models/timeseries/TimeSeries.java | 48 +++++++----- 
.../models/value/TimeBasedValue.java | 8 +- 11 files changed, 124 insertions(+), 110 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java b/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java index 85c6ef2ce..5f6b4d38f 100644 --- a/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java +++ b/src/main/java/edu/ie3/datamodel/models/UniqueEntity.java @@ -13,7 +13,7 @@ public abstract class UniqueEntity { /** Field name of {@link UniqueEntity} uuid */ public static final String UUID_FIELD_NAME = "uuid"; - private final UUID uuid; + protected final UUID uuid; public UniqueEntity() { uuid = UUID.randomUUID(); diff --git a/src/main/java/edu/ie3/datamodel/models/input/LoadProfileInput.java b/src/main/java/edu/ie3/datamodel/models/input/LoadProfileInput.java index 3696a87e7..90829de9e 100644 --- a/src/main/java/edu/ie3/datamodel/models/input/LoadProfileInput.java +++ b/src/main/java/edu/ie3/datamodel/models/input/LoadProfileInput.java @@ -13,6 +13,7 @@ import java.time.DayOfWeek; import java.time.ZonedDateTime; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.UUID; @@ -44,12 +45,12 @@ public PValue calc(ZonedDateTime time) { } @Override - protected Optional getPreviousZonedDateTime(ZonedDateTime time) { + protected Optional getPreviousDateTime(ZonedDateTime time) { return Optional.of(time.minus(1, HOURS)); } @Override - protected Optional getNextZonedDateTime(ZonedDateTime time) { + protected Optional getNextDateTime(ZonedDateTime time) { return Optional.of(time.plus(1, HOURS)); } @@ -60,10 +61,26 @@ public StandardLoadProfile getType() { @Override public String toString() { return "LoadProfileInput{" - + "type=" + + "uuid=" + + uuid + + ", type=" + type - + ", dayOfWeekToHourlyValues=" - + dayOfWeekToHourlyValues + + ", #entries=" + + dayOfWeekToHourlyValues.size() + '}'; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != 
o.getClass()) return false; + if (!super.equals(o)) return false; + LoadProfileInput that = (LoadProfileInput) o; + return type.equals(that.type) && dayOfWeekToHourlyValues.equals(that.dayOfWeekToHourlyValues); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), type, dayOfWeekToHourlyValues); + } } diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/HeatAndPTimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/HeatAndPTimeSeries.java index 95624f9da..a77c18c31 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/HeatAndPTimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/HeatAndPTimeSeries.java @@ -6,6 +6,13 @@ package edu.ie3.datamodel.models.timeseries; import edu.ie3.datamodel.models.value.HeatAndPValue; +import edu.ie3.datamodel.models.value.TimeBasedValue; +import java.util.Collection; +import java.util.UUID; /** Time series, that holds active power and heat demand values for each time step */ -public class HeatAndPTimeSeries extends IndividualTimeSeries {} +public class HeatAndPTimeSeries extends IndividualTimeSeries { + public HeatAndPTimeSeries(UUID uuid, Collection> timeBasedValues) { + super(uuid, timeBasedValues); + } +} diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/HeatAndSTimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/HeatAndSTimeSeries.java index bb806db51..73729b310 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/HeatAndSTimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/HeatAndSTimeSeries.java @@ -6,6 +6,13 @@ package edu.ie3.datamodel.models.timeseries; import edu.ie3.datamodel.models.value.HeatAndSValue; +import edu.ie3.datamodel.models.value.TimeBasedValue; +import java.util.Collection; +import java.util.UUID; /** Time series, that holds apparent power and heat demand values for each time step */ -public class HeatAndSTimeSeries extends IndividualTimeSeries {} +public class 
HeatAndSTimeSeries extends IndividualTimeSeries { + public HeatAndSTimeSeries(UUID uuid, Collection> timeBasedValues) { + super(uuid, timeBasedValues); + } +} diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/HeatDemandTimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/HeatDemandTimeSeries.java index 378526c36..6ef0a6624 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/HeatDemandTimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/HeatDemandTimeSeries.java @@ -6,5 +6,13 @@ package edu.ie3.datamodel.models.timeseries; import edu.ie3.datamodel.models.value.HeatDemandValue; +import edu.ie3.datamodel.models.value.TimeBasedValue; +import java.util.Collection; +import java.util.UUID; -public class HeatDemandTimeSeries extends IndividualTimeSeries {} +public class HeatDemandTimeSeries extends IndividualTimeSeries { + public HeatDemandTimeSeries( + UUID uuid, Collection> timeBasedValues) { + super(uuid, timeBasedValues); + } +} diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/IndividualTimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/IndividualTimeSeries.java index 39690454a..095280518 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/IndividualTimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/IndividualTimeSeries.java @@ -9,79 +9,53 @@ import edu.ie3.datamodel.models.value.Value; import java.time.ZonedDateTime; import java.util.*; +import java.util.stream.Collectors; /** Describes a TimeSeries with individual values per time step */ public class IndividualTimeSeries extends TimeSeries { /** Maps a time to its respective value to retrieve faster */ - private HashMap timeToValue = new HashMap<>(); + private Map> timeToValue; - public IndividualTimeSeries() { - super(); - } - - public IndividualTimeSeries(UUID uuid) { + public IndividualTimeSeries(UUID uuid, Collection> values) { super(uuid); - } - public IndividualTimeSeries(UUID uuid, Map 
timeToValue) { - super(uuid); - addAll(timeToValue); - } - - public IndividualTimeSeries(Map timeToValue) { - super(); - addAll(timeToValue); + timeToValue = + values.stream() + .collect(Collectors.toMap(TimeBasedValue::getTime, timeBasedValue -> timeBasedValue)); } /** - * Adding a map from {@link ZonedDateTime} to the value apparent a this point in time + * Returns the sorted set of all entries known to this time series * - * @param map The map that should be added + * @return An unmodifiable sorted set of all known time based values of this time series */ - public void addAll(Map map) { - map.forEach(this::add); + public SortedSet> getAllEntries() { + TreeSet> sortedEntries = new TreeSet<>(timeToValue.values()); + return Collections.unmodifiableSortedSet(sortedEntries); } - /** - * Adds an entry time -> value to the internal map - * - * @param time of this value - * @param value The actual value - */ - public void add(ZonedDateTime time, T value) { - timeToValue.put(time, value); - } - - /** - * Adds the individual value to the internal map - * - * @param timeBasedValue A value with time information - */ - public void add(TimeBasedValue timeBasedValue) { - this.add(timeBasedValue.getTime(), timeBasedValue.getValue()); + @Override + public Optional> getTimeBasedValue(ZonedDateTime time) { + return Optional.ofNullable(timeToValue.get(time)); } @Override public Optional getValue(ZonedDateTime time) { - return Optional.ofNullable(timeToValue.get(time)); + return getTimeBasedValue(time).map(TimeBasedValue::getValue); } @Override - public Optional> getPreviousTimeBasedValue(ZonedDateTime time) { - Optional lastZdt = - timeToValue.keySet().stream() - .filter(valueTime -> valueTime.compareTo(time) <= 0) - .max(Comparator.naturalOrder()); - return getTimeBasedValue(lastZdt); + protected Optional getPreviousDateTime(ZonedDateTime time) { + return timeToValue.keySet().stream() + .filter(valueTime -> valueTime.compareTo(time) <= 0) + .max(Comparator.naturalOrder()); } 
@Override - public Optional> getNextTimeBasedValue(ZonedDateTime time) { - Optional nextZdt = - timeToValue.keySet().stream() - .filter(valueTime -> valueTime.compareTo(time) >= 0) - .min(Comparator.naturalOrder()); - return getTimeBasedValue(nextZdt); + protected Optional getNextDateTime(ZonedDateTime time) { + return timeToValue.keySet().stream() + .filter(valueTime -> valueTime.compareTo(time) >= 0) + .min(Comparator.naturalOrder()); } @Override @@ -100,6 +74,6 @@ public int hashCode() { @Override public String toString() { - return "IndividualTimeSeries{" + "timeToValue=" + timeToValue + '}'; + return "IndividualTimeSeries{" + "uuid=" + getUuid() + ", #entries=" + timeToValue.size() + '}'; } } diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/PTimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/PTimeSeries.java index 1900d7aee..d9a3a3984 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/PTimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/PTimeSeries.java @@ -6,6 +6,13 @@ package edu.ie3.datamodel.models.timeseries; import edu.ie3.datamodel.models.value.PValue; +import edu.ie3.datamodel.models.value.TimeBasedValue; +import java.util.Collection; +import java.util.UUID; /** Time series, that holds active power values for each time step */ -public class PTimeSeries extends IndividualTimeSeries {} +public class PTimeSeries extends IndividualTimeSeries { + public PTimeSeries(UUID uuid, Collection> timeBasedValues) { + super(uuid, timeBasedValues); + } +} diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/RepetitiveTimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/RepetitiveTimeSeries.java index 5a1a63f56..1bbf00ec2 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/RepetitiveTimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/RepetitiveTimeSeries.java @@ -5,7 +5,6 @@ */ package edu.ie3.datamodel.models.timeseries; -import 
edu.ie3.datamodel.models.value.TimeBasedValue; import edu.ie3.datamodel.models.value.Value; import java.time.ZonedDateTime; import java.util.Optional; @@ -28,32 +27,4 @@ public RepetitiveTimeSeries(UUID uuid) { public Optional getValue(ZonedDateTime time) { return Optional.of(calc(time)); } - - /** - * Get the lastly known zoned date time previous to the provided time with available values. If - * the queried time is equals to the lastly known time, that one is returned. - * - * @param time Queried time - * @return lastly known zoned date time with available values - */ - protected abstract Optional getPreviousZonedDateTime(ZonedDateTime time); - - @Override - public Optional> getPreviousTimeBasedValue(ZonedDateTime time) { - return getTimeBasedValue(getPreviousZonedDateTime(time)); - } - - /** - * Get the next upcoming zoned date time with available values. If it is the queried time, that - * one is returned. - * - * @param time Queried time - * @return next upcoming zoned date time with available values - */ - protected abstract Optional getNextZonedDateTime(ZonedDateTime time); - - @Override - public Optional> getNextTimeBasedValue(ZonedDateTime time) { - return getTimeBasedValue(getNextZonedDateTime(time)); - } } diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/STimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/STimeSeries.java index ffdaee7da..723cac22f 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/STimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/STimeSeries.java @@ -6,6 +6,13 @@ package edu.ie3.datamodel.models.timeseries; import edu.ie3.datamodel.models.value.SValue; +import edu.ie3.datamodel.models.value.TimeBasedValue; +import java.util.Collection; +import java.util.UUID; /** Time series, that holds apparent power values for each time step */ -public class STimeSeries extends IndividualTimeSeries {} +public class STimeSeries extends IndividualTimeSeries { + public STimeSeries(UUID 
uuid, Collection> timeBasedValues) { + super(uuid, timeBasedValues); + } +} diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeries.java index 9186aad46..8e13f1900 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeries.java @@ -13,7 +13,7 @@ import java.util.UUID; /** Describes a Series of {@link edu.ie3.datamodel.models.value.Value values} */ -abstract class TimeSeries extends UniqueEntity { +public abstract class TimeSeries extends UniqueEntity { public TimeSeries() { super(); @@ -23,16 +23,6 @@ public TimeSeries(UUID uuid) { super(uuid); } - /** @return the value at the given time step as a TimeBasedValue */ - protected Optional> getTimeBasedValue(Optional optionalTime) { - if (optionalTime.isPresent()) { - ZonedDateTime zdt = optionalTime.get(); - return getTimeBasedValue(zdt); - } else { - return Optional.empty(); - } - } - /** @return the value at the given time step as a TimeBasedValue */ public Optional> getTimeBasedValue(ZonedDateTime time) { T content = getValue(time).orElse(null); @@ -44,14 +34,6 @@ public Optional> getTimeBasedValue(ZonedDateTime time) { } } - /** - * @return the most recent available value before or at the given time step as a TimeBasedValue - */ - public abstract Optional> getPreviousTimeBasedValue(ZonedDateTime time); - - /** @return the next available value after or at the given time step as a TimeBasedValue */ - public abstract Optional> getNextTimeBasedValue(ZonedDateTime time); - /** * If you prefer to keep the time with the value, please use {@link TimeSeries#getTimeBasedValue} * instead @@ -59,4 +41,32 @@ public Optional> getTimeBasedValue(ZonedDateTime time) { * @return An option on the raw value at the given time step */ public abstract Optional getValue(ZonedDateTime time); + + /** + * Get the next earlier known time instant + * + * @param time 
Reference in time + * @return The next earlier known time instant + */ + protected abstract Optional getPreviousDateTime(ZonedDateTime time); + + /** + * Get the next later known time instant + * + * @param time Reference in time + * @return The next later known time instant + */ + protected abstract Optional getNextDateTime(ZonedDateTime time); + + /** + * @return the most recent available value before or at the given time step as a TimeBasedValue + */ + public Optional> getPreviousTimeBasedValue(ZonedDateTime time) { + return getPreviousDateTime(time).map(this::getPreviousTimeBasedValue).map(Optional::get); + } + + /** @return the next available value after or at the given time step as a TimeBasedValue */ + public Optional> getNextTimeBasedValue(ZonedDateTime time) { + return getNextDateTime(time).map(this::getPreviousTimeBasedValue).map(Optional::get); + } } diff --git a/src/main/java/edu/ie3/datamodel/models/value/TimeBasedValue.java b/src/main/java/edu/ie3/datamodel/models/value/TimeBasedValue.java index ae9be5501..6478246be 100644 --- a/src/main/java/edu/ie3/datamodel/models/value/TimeBasedValue.java +++ b/src/main/java/edu/ie3/datamodel/models/value/TimeBasedValue.java @@ -14,7 +14,8 @@ * * @param type of value */ -public class TimeBasedValue extends UniqueEntity { +public class TimeBasedValue extends UniqueEntity + implements Comparable> { private T value; @@ -59,4 +60,9 @@ public int hashCode() { public String toString() { return "TimeBasedValue{" + value + "}@" + time; } + + @Override + public int compareTo(TimeBasedValue tTimeBasedValue) { + return this.time.compareTo(tTimeBasedValue.getTime()); + } } From ad48cb6db47974270a72614bd647ef35b6f46083 Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Mon, 30 Mar 2020 18:07:57 +0200 Subject: [PATCH 04/13] First approach in de-serialising time series --- .../exceptions/DeserializationException.java | 18 ++++ .../deserialize/TimeSeriesDeserializer.java | 88 +++++++++++++++++ 
.../input/TimeBasedValueProcessor.java | 65 ++++--------- .../io/processor/input/ValueProcessor.java | 94 +++++++++++++++++++ 4 files changed, 217 insertions(+), 48 deletions(-) create mode 100644 src/main/java/edu/ie3/datamodel/exceptions/DeserializationException.java create mode 100644 src/main/java/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializer.java create mode 100644 src/main/java/edu/ie3/datamodel/io/processor/input/ValueProcessor.java diff --git a/src/main/java/edu/ie3/datamodel/exceptions/DeserializationException.java b/src/main/java/edu/ie3/datamodel/exceptions/DeserializationException.java new file mode 100644 index 000000000..59e616e7a --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/exceptions/DeserializationException.java @@ -0,0 +1,18 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.exceptions; + +public class DeserializationException extends Exception { + public DeserializationException(String message, Object deserializationObject) { + super(message + "\nAffected object to be deseralized: " + deserializationObject.toString()); + } + + public DeserializationException(String message, Object deserializationObject, Throwable cause) { + super( + message + "\nAffected object to be deseralized: " + deserializationObject.toString(), + cause); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializer.java b/src/main/java/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializer.java new file mode 100644 index 000000000..264985b2e --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializer.java @@ -0,0 +1,88 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.deserialize; + +import edu.ie3.datamodel.exceptions.DeserializationException; +import edu.ie3.datamodel.io.processor.input.TimeBasedValueProcessor; +import edu.ie3.datamodel.io.processor.input.ValueProcessor; +import edu.ie3.datamodel.models.input.LoadProfileInput; +import edu.ie3.datamodel.models.timeseries.IndividualTimeSeries; +import edu.ie3.datamodel.models.timeseries.TimeSeries; +import edu.ie3.datamodel.models.value.TimeBasedValue; +import edu.ie3.datamodel.models.value.Value; +import java.util.*; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TimeSeriesDeserializer { + private static final Logger logger = LoggerFactory.getLogger(TimeSeriesDeserializer.class); + + public void deserialize(TimeSeries timeSeries) + throws DeserializationException { + /* Distinguish between individual and repetitive time series */ + if (timeSeries instanceof IndividualTimeSeries) { + IndividualTimeSeries individualTimeSeries = (IndividualTimeSeries) timeSeries; + + /* Get all entries */ + TimeBasedValueProcessor timeBasedValueProcessor = new TimeBasedValueProcessor(); + SortedSet> entries = individualTimeSeries.getAllEntries(); + Set> result = + Collections.unmodifiableSet( + entries.stream() + .map( + timeBasedValue -> { + /* Build the mapping from field name to value for the containing class */ + Optional> outerResult = + timeBasedValueProcessor.handleEntity(timeBasedValue); + if (!outerResult.isPresent()) { + logger.error( + "Cannot deserialize a time based value \"{}\".", timeBasedValue); + return new HashMap(); + } + + ValueProcessor valueProcessor = + new ValueProcessor( + (Class) timeBasedValue.getValue().getClass()); + Optional> innerResult = + valueProcessor.handleEntity(timeBasedValue.getValue()); + if 
(!innerResult.isPresent()) { + logger.error( + "Cannot deserialize a time value \"{}\".", timeBasedValue.getValue()); + return new HashMap(); + } + + LinkedHashMap interMediateResult = outerResult.get(); + interMediateResult.putAll(innerResult.get()); + return Collections.unmodifiableMap(interMediateResult); + }) + .collect(Collectors.toSet())); + + // TODO: Writing the result + } else { + /* As repetitive time series as only abstract, determine the concrete type */ + if (timeSeries instanceof LoadProfileInput) { + LoadProfileInput loadProfile = (LoadProfileInput) timeSeries; + throw new DeserializationException( + "The deserialisation of LoadProleInput is not implemented, yet.", loadProfile); + + /* + * Steps to implement + * 1) Determine the "unique" table entries as a combination of "credentials" + * and edu.ie3.datamodel.models.value.Value + * 2) Build field name to value mapping for credentials and values independently + * 3) Combine the mapping + * 4) Write the result + */ + } else { + throw new DeserializationException( + "There is no deserialization routine defined for a time series of type " + + timeSeries.getClass().getSimpleName(), + timeSeries); + } + } + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessor.java b/src/main/java/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessor.java index e5bdc12b9..67339e148 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessor.java @@ -7,49 +7,37 @@ import edu.ie3.datamodel.exceptions.EntityProcessorException; import edu.ie3.datamodel.io.processor.Processor; -import edu.ie3.datamodel.models.value.EnergyPriceValue; import edu.ie3.datamodel.models.value.TimeBasedValue; import edu.ie3.datamodel.models.value.Value; import edu.ie3.util.TimeTools; import java.lang.reflect.Method; import java.time.ZoneId; import java.util.*; -import java.util.stream.Stream; 
import javax.measure.Quantity; /** - * Processor for time based values. It "unboxes" the underlying value and joins the field name to - * value mapping of the time based container and the value itself. - * - * @param Type parameter of the contained {@link Value} + * Processor for time based values. It only handles the time based value itself. The included value + * must be treated separately. Therefore using the raw type is okay here. */ -public class TimeBasedValueProcessor extends Processor { +@SuppressWarnings("rawtypes") +public class TimeBasedValueProcessor extends Processor { - public static final List> eligibleClasses = - Collections.unmodifiableList(Collections.singletonList(EnergyPriceValue.class)); + public static final List> eligibleClasses = + Collections.unmodifiableList(Collections.singletonList(TimeBasedValue.class)); - private final SortedMap topLevelFieldNameToGetter; - private final SortedMap valueFieldNameToGetter; + private final SortedMap fieldNameToGetter; private final String[] headerElements; - /** - * Constructs the processor and registers the foreseen class of the contained {@link Value} - * - * @param foreSeenClass Foreseen class to be contained in this time based value - */ - public TimeBasedValueProcessor(Class foreSeenClass) { - super(foreSeenClass); + /** Constructs the processor and registers the foreseen class */ + public TimeBasedValueProcessor() { + super(TimeBasedValue.class); /* Build a mapping from field name to getter method, disjoint for TimeBasedValue and the value itself */ - this.topLevelFieldNameToGetter = + this.fieldNameToGetter = mapFieldNameToGetter(TimeBasedValue.class, Collections.singletonList("value")); - this.valueFieldNameToGetter = mapFieldNameToGetter(foreSeenClass); - /* Flatten the collected field name */ - this.headerElements = - Stream.of(topLevelFieldNameToGetter.keySet(), valueFieldNameToGetter.keySet()) - .flatMap(Set::stream) - .toArray(String[]::new); + /* Collect the field names */ + 
this.headerElements = fieldNameToGetter.keySet().toArray(new String[0]); TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss"); } @@ -61,36 +49,17 @@ public TimeBasedValueProcessor(Class foreSeenClass) { * @param timeBasedValue The entity to handle * @return An option on a mapping from field name to field value as String representation */ - public Optional> handleEntity(TimeBasedValue timeBasedValue) { - if (!registeredClass.equals(timeBasedValue.getValue().getClass())) - throw new EntityProcessorException( - "Cannot process " - + timeBasedValue.getValue().getClass().getSimpleName() - + ".class with this EntityProcessor. Please either provide an element of " - + registeredClass.getSimpleName() - + ".class or create a new factory for " - + timeBasedValue.getClass().getSimpleName() - + ".class!"); - + public Optional> handleEntity( + TimeBasedValue timeBasedValue) { /* Process both entities disjoint */ LinkedHashMap topLevelResult; - LinkedHashMap valueResult; try { - topLevelResult = processObject(timeBasedValue, topLevelFieldNameToGetter); + topLevelResult = processObject(timeBasedValue, fieldNameToGetter); } catch (EntityProcessorException e) { logger.error("Cannot process the time based value {} itself.", timeBasedValue, e); return Optional.empty(); } - try { - valueResult = processObject(timeBasedValue.getValue(), valueFieldNameToGetter); - } catch (EntityProcessorException e) { - logger.error( - "Cannot process the value {} in a time based value.", timeBasedValue.getValue(), e); - return Optional.empty(); - } - /* Mix everything together */ - topLevelResult.putAll(valueResult); return Optional.of(topLevelResult); } @@ -106,7 +75,7 @@ public String[] getHeaderElements() { } @Override - protected List> getEligibleEntityClasses() { + protected List> getEligibleEntityClasses() { return eligibleClasses; } } diff --git a/src/main/java/edu/ie3/datamodel/io/processor/input/ValueProcessor.java 
b/src/main/java/edu/ie3/datamodel/io/processor/input/ValueProcessor.java new file mode 100644 index 000000000..c42ce9e3f --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/processor/input/ValueProcessor.java @@ -0,0 +1,94 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.processor.input; + +import edu.ie3.datamodel.exceptions.EntityProcessorException; +import edu.ie3.datamodel.io.processor.Processor; +import edu.ie3.datamodel.models.value.EnergyPriceValue; +import edu.ie3.datamodel.models.value.Value; +import edu.ie3.util.TimeTools; +import java.lang.reflect.Method; +import java.time.ZoneId; +import java.util.*; +import javax.measure.Quantity; + +/** + * Processor for time based values. It "unboxes" the underlying value and joins the field name to + * value mapping of the time based container and the value itself. + * + * @param Type parameter of the contained {@link Value} + */ +public class ValueProcessor extends Processor { + + public static final List> eligibleClasses = + Collections.unmodifiableList(Collections.singletonList(EnergyPriceValue.class)); + + private final SortedMap fieldNameToGetter; + private final String[] headerElements; + + /** + * Constructs the processor and registers the foreseen class of the {@link Value} + * + * @param foreSeenClass Foreseen class to be contained in this time based value + */ + public ValueProcessor(Class foreSeenClass) { + super(foreSeenClass); + + /* Build a mapping from field name to getter method, disjoint for TimeBasedValue and the value itself */ + this.fieldNameToGetter = mapFieldNameToGetter(foreSeenClass); + + /* Flatten the collected field name */ + this.headerElements = fieldNameToGetter.keySet().toArray(new String[0]); + + TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss"); + } + + /** + * Handles a given time value and 
returns an option on a mapping from field name on field value as + * String representation + * + * @param value The entity to handle + * @return An option on a mapping from field name to field value as String representation + */ + public Optional> handleEntity(T value) { + if (!registeredClass.equals(value.getClass())) + throw new EntityProcessorException( + "Cannot process " + + value.getClass().getSimpleName() + + ".class with this EntityProcessor. Please either provide an element of " + + registeredClass.getSimpleName() + + ".class or create a new factory for " + + value.getClass().getSimpleName() + + ".class!"); + + /* Process both entities disjoint */ + LinkedHashMap result; + try { + result = processObject(value, fieldNameToGetter); + } catch (EntityProcessorException e) { + logger.error("Cannot process the value {}.", value, e); + return Optional.empty(); + } + + return Optional.of(result); + } + + @Override + protected Optional handleProcessorSpecificQuantity( + Quantity quantity, String fieldName) { + throw new UnsupportedOperationException("No specific quantity handling needed here!"); + } + + @Override + public String[] getHeaderElements() { + return headerElements; + } + + @Override + protected List> getEligibleEntityClasses() { + return eligibleClasses; + } +} From 1106ad42a4a1c38543e3d6724110def2f5be13c9 Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Tue, 31 Mar 2020 18:06:15 +0200 Subject: [PATCH 05/13] Altering csv file sink to process field to value mapping instead of entities --- .../ie3/datamodel/io/CsvFileDefinition.java | 30 +++ .../edu/ie3/datamodel/io/Destination.java | 9 + .../edu/ie3/datamodel/io/FileDefinition.java | 98 +++++++++ .../io/connectors/CsvFileConnector.java | 189 +++++++++++------- .../ie3/datamodel/io/sink/CsvFileSink.java | 114 ++++++----- .../edu/ie3/datamodel/io/sink/DataSink.java | 15 +- .../datamodel/io/sink/CsvFileSinkTest.groovy | 147 +++++++++----- 7 files changed, 408 insertions(+), 194 deletions(-) create mode 
100644 src/main/java/edu/ie3/datamodel/io/CsvFileDefinition.java create mode 100644 src/main/java/edu/ie3/datamodel/io/Destination.java create mode 100644 src/main/java/edu/ie3/datamodel/io/FileDefinition.java diff --git a/src/main/java/edu/ie3/datamodel/io/CsvFileDefinition.java b/src/main/java/edu/ie3/datamodel/io/CsvFileDefinition.java new file mode 100644 index 000000000..6ef4d1cf7 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/CsvFileDefinition.java @@ -0,0 +1,30 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io; + +import java.util.Arrays; + +public class CsvFileDefinition extends FileDefinition { + private static final String EXTENSION = "csv"; + + public CsvFileDefinition(String fileName, String[] headLineElements) { + super(fileName, EXTENSION, headLineElements); + } + + @Override + public String toString() { + return "CsvFileDefinition{" + + "fileName='" + + fileName + + '\'' + + ", fileExtension='" + + fileExtension + + '\'' + + ", headLineElements=" + + Arrays.toString(headLineElements) + + '}'; + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/Destination.java b/src/main/java/edu/ie3/datamodel/io/Destination.java new file mode 100644 index 000000000..9df6dc09f --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/Destination.java @@ -0,0 +1,9 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io; + +/** Interface to denote where some piece of information is supposed to end */ +public interface Destination {} diff --git a/src/main/java/edu/ie3/datamodel/io/FileDefinition.java b/src/main/java/edu/ie3/datamodel/io/FileDefinition.java new file mode 100644 index 000000000..bf5983481 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/FileDefinition.java @@ -0,0 +1,98 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io; + +import java.io.File; +import java.util.Arrays; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Class to contain all relevant information to prepare a file for reading or writing objects to / + * from + */ +public class FileDefinition implements Destination { + private static final Pattern fileNamePattern = Pattern.compile("[\\w\\\\/]+"); + private static final Pattern extensionPattern = Pattern.compile("\\.?([\\w\\.]+)$"); + private static final Pattern fullPathPattern = + Pattern.compile("(" + fileNamePattern.pattern() + ")\\.+(\\w+)"); + + protected final String fileName; + protected final String fileExtension; + protected final String[] headLineElements; + + public FileDefinition(String fileName, String fileExtension, String[] headLineElements) { + if (fileName.matches(fullPathPattern.pattern())) { + Matcher matcher = extensionPattern.matcher(fileExtension); + matcher.matches(); + this.fileName = matcher.group(0).replaceAll("\\\\/", File.separator); + } else if (fileName.matches(fileNamePattern.pattern())) { + this.fileName = fileName.replaceAll("\\\\/", File.separator); + } else { + throw new IllegalArgumentException( + "The file name \"" 
+ fileName + "\" is no valid file name."); + } + + if (fileExtension.matches(fullPathPattern.pattern())) { + Matcher matcher = extensionPattern.matcher(fileExtension); + matcher.matches(); + this.fileExtension = matcher.group(2).replaceAll("\\.", ""); + } else if (fileName.matches(extensionPattern.pattern())) { + Matcher matcher = extensionPattern.matcher(fileExtension); + matcher.matches(); + this.fileExtension = matcher.group(0).replaceAll("\\.", ""); + } else { + throw new IllegalArgumentException( + "The extension \"" + fileExtension + "\" is no valid file extension."); + } + + this.headLineElements = headLineElements; + } + + public String getFileName() { + return fileName; + } + + public String getFileExtension() { + return fileExtension; + } + + public String[] getHeadLineElements() { + return headLineElements; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileDefinition that = (FileDefinition) o; + return fileName.equals(that.fileName) + && fileExtension.equals(that.fileExtension) + && Arrays.equals(headLineElements, that.headLineElements); + } + + @Override + public int hashCode() { + int result = Objects.hash(fileName, fileExtension); + result = 31 * result + Arrays.hashCode(headLineElements); + return result; + } + + @Override + public String toString() { + return "FileDefinition{" + + "fileName='" + + fileName + + '\'' + + ", fileExtension='" + + fileExtension + + '\'' + + ", headLineElements=" + + Arrays.toString(headLineElements) + + '}'; + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java index 15d14dde4..3b0eb30e1 100644 --- a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java +++ b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java @@ -6,8 +6,7 @@ package edu.ie3.datamodel.io.connectors; import 
edu.ie3.datamodel.exceptions.ConnectorException; -import edu.ie3.datamodel.io.FileNamingStrategy; -import edu.ie3.datamodel.models.UniqueEntity; +import edu.ie3.datamodel.io.CsvFileDefinition; import edu.ie3.util.io.FileIOUtils; import java.io.BufferedWriter; import java.io.File; @@ -27,110 +26,148 @@ public class CsvFileConnector implements DataConnector { private static final Logger log = LogManager.getLogger(CsvFileConnector.class); - private final Map, BufferedWriter> writers = new HashMap<>(); - private final FileNamingStrategy fileNamingStrategy; + private final Map> fileToWriter; + private final String csvSeparator; private final String baseFolderName; private static final String FILE_ENDING = ".csv"; - public CsvFileConnector(String baseFolderName, FileNamingStrategy fileNamingStrategy) { + /** + * Creates a file connector. + * + * @param baseFolderName Path to where the file may be located + * @param fileDefinitions Collection of file definitions. + * @param csvSeparator Separator character to separate csv columns + * @param initFiles true, if the files should be created during initialization (might create + * files, that only consist of a headline, because no data will be written into them), false + * otherwise + * @throws ConnectorException If the connector cannot be established + */ + public CsvFileConnector( + String baseFolderName, + Collection fileDefinitions, + String csvSeparator, + boolean initFiles) + throws ConnectorException { this.baseFolderName = baseFolderName; - this.fileNamingStrategy = fileNamingStrategy; + this.csvSeparator = csvSeparator; + this.fileToWriter = mapFileDefinitionsToWriter(fileDefinitions, initFiles); + } + + /** + * Builds the mapping from file definition to option to an equivalent writer. 
+ * + * @param fileDefinitions Collections of file definitions + * @param initWriter true, if the writers may be instantiated + * @return An unmodifiable map of file definitions to writers + * @throws ConnectorException If the initialization of any writer was unsuccessful + */ + private Map> mapFileDefinitionsToWriter( + Collection fileDefinitions, boolean initWriter) throws ConnectorException { + Map> map = new HashMap<>(); + for (CsvFileDefinition fileDefinition : fileDefinitions) { + if (!initWriter) map.put(fileDefinition, Optional.empty()); + else { + try { + BufferedWriter writer = + initWriter(fileDefinition.getFileName(), fileDefinition.getHeadLineElements()); + map.put(fileDefinition, Optional.of(writer)); + } catch (ConnectorException | IOException e) { + throw new ConnectorException("Cannot build a writer for \"" + fileDefinition + "\".", e); + } + } + } + return map; } + /** Closes all buffered writers */ @Override public void shutdown() { - - writers + fileToWriter .values() .forEach( - bufferedWriter -> { - try { - bufferedWriter.close(); - } catch (IOException e) { - log.error("Error during CsvFileConnector shutdown process.", e); - } - }); - } - - public BufferedWriter initWriter( - Class clz, String[] headerElements, String csvSep) - throws ConnectorException, IOException { - return initWriter(baseFolderName, clz, fileNamingStrategy, headerElements, csvSep); + bufferedWriter -> + bufferedWriter.ifPresent( + writer -> { + try { + writer.close(); + } catch (IOException e) { + log.error("Error during CsvFileConnector shutdown process.", e); + } + })); } - public Optional getWriter(Class clz) { - return Optional.ofNullable(writers.get(clz)); - } - - public BufferedWriter getOrInitWriter( - Class clz, String[] headerElements, String csvSep) { - - return getWriter(clz) - .orElseGet( - () -> { - BufferedWriter newWriter = null; - try { - newWriter = initWriter(clz, headerElements, csvSep); - } catch (ConnectorException | IOException e) { - 
log.error("Error while initiating writer in CsvFileConnector.", e); - } - - writers.put(clz, newWriter); - return newWriter; - }); + /** + * Returns a suitable writer that writes to the given file definition + * + * @param fileDefinition Queried file definition + * @return An option to the writer + */ + public BufferedWriter getWriter(CsvFileDefinition fileDefinition) throws ConnectorException { + if (!fileToWriter.containsKey(fileDefinition)) + throw new ConnectorException( + "There is no file writer associated with this definition: " + fileDefinition); + + Optional writerOption = fileToWriter.get(fileDefinition); + if (writerOption.isPresent()) return writerOption.get(); + else { + try { + BufferedWriter writer = + initWriter(fileDefinition.getFileName(), fileDefinition.getHeadLineElements()); + fileToWriter.put(fileDefinition, Optional.of(writer)); + return writer; + } catch (ConnectorException | IOException e) { + throw new ConnectorException("Cannot build a writer for \"" + fileDefinition + "\".", e); + } + } } - private BufferedWriter initWriter( - String baseFolderName, - Class clz, - FileNamingStrategy fileNamingStrategy, - String[] headerElements, - String csvSep) + /** + * Initialises a writer for the given file definition + * + * @param fileName File name + * @param headLineElements Array of head line elements for the implicitly created file + * @return A writer denoted to that file name + * @throws ConnectorException If the base folder path is already occupied + * @throws IOException In any case writing to the file fails. 
+ */ + private BufferedWriter initWriter(String fileName, String[] headLineElements) throws ConnectorException, IOException { File basePathDir = new File(baseFolderName); if (basePathDir.isFile()) throw new ConnectorException( "Base path dir '" + baseFolderName + "' already exists and is a file!"); if (!basePathDir.exists()) basePathDir.mkdirs(); - - String fileName = - fileNamingStrategy - .getFileName(clz) - .orElseThrow( - () -> - new ConnectorException( - "Cannot determine the file name for provided class '" - + clz.getSimpleName() - + "'.")); - String fullPath = baseFolderName + File.separator + fileName + FILE_ENDING; + String fullPath = baseFolderName + File.separator + fileName + CsvFileConnector.FILE_ENDING; BufferedWriter writer = FileIOUtils.getBufferedWriterUTF8(fullPath); // write header - writeFileHeader(clz, writer, headerElements, csvSep); + writeFileHeader(writer, headLineElements, csvSeparator); return writer; } - private void writeFileHeader( - Class clz, - BufferedWriter writer, - final String[] headerElements, - String csvSep) { - try { - for (int i = 0; i < headerElements.length; i++) { - String attribute = headerElements[i]; - writer.append("\"").append(attribute).append("\""); // adds " to headline - if (i + 1 < headerElements.length) { - writer.append(csvSep); - } else { - writer.append("\n"); - } + /** + * Writes the headline to the file implicitly provided by the writer. All entries are quoted + * ("bla","foo",...). + * + * @param writer Buffered writer to use + * @param headerElements Head line elements + * @param csvSep Separator character to separate csv columns + * @throws IOException when the head line appending does not work. 
+ */ + private void writeFileHeader(BufferedWriter writer, final String[] headerElements, String csvSep) + throws IOException { + for (int i = 0; i < headerElements.length; i++) { + String attribute = headerElements[i]; + writer.append("\"").append(attribute).append("\""); // adds " to headline + if (i + 1 < headerElements.length) { + writer.append(csvSep); + } else { + writer.append("\n"); } - writer.flush(); - } catch (IOException e) { - log.error("Error during file header creation for class '" + clz.getSimpleName() + "'.", e); } + writer.flush(); } } diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index dbc72b982..c361d68e4 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -5,11 +5,11 @@ */ package edu.ie3.datamodel.io.sink; +import edu.ie3.datamodel.exceptions.ConnectorException; import edu.ie3.datamodel.exceptions.SinkException; -import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.io.CsvFileDefinition; import edu.ie3.datamodel.io.connectors.CsvFileConnector; import edu.ie3.datamodel.io.connectors.DataConnector; -import edu.ie3.datamodel.io.processor.ProcessorProvider; import edu.ie3.datamodel.models.UniqueEntity; import java.io.BufferedWriter; import java.io.IOException; @@ -23,40 +23,33 @@ * @version 0.1 * @since 19.03.20 */ -public class CsvFileSink implements DataSink { +public class CsvFileSink implements DataSink> { private static final Logger log = LogManager.getLogger(CsvFileSink.class); private final CsvFileConnector connector; - private final ProcessorProvider processorProvider; private final String csvSep; - public CsvFileSink(String baseFolderPath) { - this(baseFolderPath, new ProcessorProvider(), new FileNamingStrategy(), false, ","); - } - /** - * Create an instance of a csv file sink - * * @param baseFolderPath the base folder path where the files should be put 
into - * @param processorProvider the processor provided that should be used for entity de-serialization - * @param fileNamingStrategy the file naming strategy that should be used - * @param initFiles true if the files should be created during initialization (might create files, - * that only consist of a headline, because no data will be writen into them), false otherwise - * @param csvSep the csv file separator that should be use + * @param fileDefinitions Collection of file definitions + * @param csvSep csv file separator that should be use + * @param initFiles true, if the files should be created during initialization (might create + * files, that only consist of a headline, because no data will be written into them), false + * otherwise */ public CsvFileSink( String baseFolderPath, - ProcessorProvider processorProvider, - FileNamingStrategy fileNamingStrategy, - boolean initFiles, - String csvSep) { + Collection fileDefinitions, + String csvSep, + boolean initFiles) { this.csvSep = csvSep; - this.processorProvider = processorProvider; - this.connector = new CsvFileConnector(baseFolderPath, fileNamingStrategy); - - if (initFiles) initFiles(processorProvider, connector); + try { + this.connector = new CsvFileConnector(baseFolderPath, fileDefinitions, csvSep, initFiles); + } catch (ConnectorException e) { + throw new SinkException("Error during initialization of the file sink.", e); + } } @Override @@ -64,51 +57,56 @@ public DataConnector getDataConnector() { return connector; } + /** + * Persists the given data to the specified location. 
+ * + * @param destination Specific location of the data + * @param data Data to persist + */ @Override - public void persistAll(Collection entities) { - for (T entity : entities) { - persist(entity); + public void persist(CsvFileDefinition destination, LinkedHashMap data) { + BufferedWriter writer; + try { + writer = connector.getWriter(destination); + } catch (ConnectorException e) { + throw new SinkException( + "Cannot find a matching writer for file definition: \"" + destination + "\".", e); } - } - @Override - public void persist(T entity) { - - LinkedHashMap entityFieldData = - processorProvider - .processEntity(entity) - .orElseThrow( - () -> - new SinkException( - "Cannot persist entity of type '" - + entity.getClass().getSimpleName() - + "'. Is this sink properly initialized?")); + if (data.keySet().size() != destination.getHeadLineElements().length + || !data.keySet().containsAll(Arrays.asList(destination.getHeadLineElements()))) { + throw new SinkException("The provided data does not match the head line definition!"); + } - String[] headerElements = - processorProvider.getHeaderElements(entity.getClass()).orElse(new String[0]); - BufferedWriter writer = connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); - write(entityFieldData, headerElements, writer); + write(data, destination.getHeadLineElements(), writer); } /** - * Initialize files, hence create a file for each expected class that will be processed in the - * future. + * Persists the given amount of data to the specified location. 
* - * @param processorProvider the processor provider all files that will be processed is derived - * from - * @param connector the connector to the files + * @param destination Specific location of the data + * @param data Data to persist */ - private void initFiles( - final ProcessorProvider processorProvider, final CsvFileConnector connector) { + @Override + public void persistAll( + CsvFileDefinition destination, Collection> data) { + BufferedWriter writer; + try { + writer = connector.getWriter(destination); + } catch (ConnectorException e) { + throw new SinkException( + "Cannot find a matching writer for file definition: \"" + destination + "\".", e); + } + + data.forEach( + entry -> { + if (entry.keySet().size() != destination.getHeadLineElements().length + || !entry.keySet().containsAll(Arrays.asList(destination.getHeadLineElements()))) { + throw new SinkException("The provided data does not match the head line definition!"); + } - processorProvider - .getRegisteredClasses() - .forEach( - clz -> - processorProvider - .getHeaderElements(clz) - .ifPresent( - headerElements -> connector.getOrInitWriter(clz, headerElements, csvSep))); + write(entry, destination.getHeadLineElements(), writer); + }); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java b/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java index 622790d94..fe384d703 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/DataSink.java @@ -5,17 +5,22 @@ */ package edu.ie3.datamodel.io.sink; +import edu.ie3.datamodel.io.Destination; import edu.ie3.datamodel.io.connectors.DataConnector; -import edu.ie3.datamodel.models.UniqueEntity; import java.util.Collection; -/** Describes a class that manages data persistence */ -public interface DataSink { +/** + * Describes a class that manages data persistence + * + * @param Type of destination definition for a piece of data + * @param Type of data, the sink is supposed to handle + 
*/ +public interface DataSink { /** @return the connector of this sink */ DataConnector getDataConnector(); - void persist(C entity); + void persist(D destination, T data); - void persistAll(Collection entities); + void persistAll(D destination, Collection data); } diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index fa860ad24..7ab046ef9 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -6,27 +6,41 @@ package edu.ie3.datamodel.io.sink import edu.ie3.datamodel.exceptions.SinkException +import edu.ie3.datamodel.io.CsvFileDefinition import edu.ie3.datamodel.io.FileNamingStrategy -import edu.ie3.datamodel.io.processor.ProcessorProvider import edu.ie3.datamodel.io.processor.result.ResultEntityProcessor -import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.result.system.EvResult import edu.ie3.datamodel.models.result.system.PvResult import edu.ie3.datamodel.models.result.system.WecResult -import edu.ie3.util.TimeTools import edu.ie3.util.io.FileIOUtils import spock.lang.Shared import spock.lang.Specification -import tec.uom.se.quantity.Quantities - -import javax.measure.Quantity -import javax.measure.quantity.Power class CsvFileSinkTest extends Specification { @Shared String testBaseFolderPath = "test" + @Shared + CsvFileDefinition pvResultFileDefinition + + @Shared + CsvFileDefinition evResultFileDefinition + + @Shared + CsvFileDefinition wecResultFileDefinition + + def setupSpec() { + FileNamingStrategy fileNamingStrategy = new FileNamingStrategy() + ResultEntityProcessor pvResultEntityProcessor = new ResultEntityProcessor(PvResult) + ResultEntityProcessor evResultEntityProcessor = new ResultEntityProcessor(EvResult) + ResultEntityProcessor wecResultEntityProcessor = new ResultEntityProcessor(WecResult) + + pvResultFileDefinition = new 
CsvFileDefinition(fileNamingStrategy.getFileName(PvResult).get(), pvResultEntityProcessor.getHeaderElements()) + evResultFileDefinition = new CsvFileDefinition(fileNamingStrategy.getFileName(EvResult).get(), evResultEntityProcessor.getHeaderElements()) + wecResultFileDefinition = new CsvFileDefinition(fileNamingStrategy.getFileName(WecResult).get(), wecResultEntityProcessor.getHeaderElements()) + } + def cleanup() { // delete files after each test if they exist if (new File(testBaseFolderPath).exists()) { @@ -34,55 +48,60 @@ class CsvFileSinkTest extends Specification { } } - def "A valid CsvFileSink called by simple constructor should not initialize files by default and consist of several default values"() { + def "A valid CsvFileSink with 'initFiles' enabled should create files as expected"() { given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath) + CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [ + pvResultFileDefinition, + evResultFileDefinition + ], ",", true) csvFileSink.dataConnector.shutdown() expect: - !new File(testBaseFolderPath).exists() - csvFileSink.csvSep == "," + new File(testBaseFolderPath).exists() + new File(testBaseFolderPath + File.separator + "ev_res.csv").exists() + new File(testBaseFolderPath + File.separator + "pv_res.csv").exists() } - def "A valid CsvFileSink with 'initFiles' enabled should create files as expected"() { + def "A valid CsvFileSink without 'initFiles' enabled should create files as expected"() { given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, - new ProcessorProvider([ - new ResultEntityProcessor(PvResult), - new ResultEntityProcessor(EvResult) - ]), - new FileNamingStrategy(), - true, - ",") + CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [ + pvResultFileDefinition, + evResultFileDefinition + ], ",", false) csvFileSink.dataConnector.shutdown() expect: - new File(testBaseFolderPath).exists() - new File(testBaseFolderPath + File.separator + 
"ev_res.csv").exists() - new File(testBaseFolderPath + File.separator + "pv_res.csv").exists() + !new File(testBaseFolderPath).exists() + !new File(testBaseFolderPath + File.separator + "ev_res.csv").exists() + !new File(testBaseFolderPath + File.separator + "pv_res.csv").exists() } def "A valid CsvFileSink without 'initFiles' should only persist provided elements correctly but not all files"() { given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, - new ProcessorProvider([ - new ResultEntityProcessor(PvResult), - new ResultEntityProcessor(WecResult), - new ResultEntityProcessor(EvResult) - ]), - new FileNamingStrategy(), - false, - ",") - - UUID uuid = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") - UUID inputModel = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") - Quantity p = Quantities.getQuantity(10, StandardUnits.ACTIVE_POWER_IN) - Quantity q = Quantities.getQuantity(10, StandardUnits.REACTIVE_POWER_IN) - PvResult pvResult = new PvResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) - WecResult wecResult = new WecResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) + CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [ + pvResultFileDefinition, + evResultFileDefinition, + wecResultFileDefinition + ], ",", false) + + LinkedHashMap pvResult = [ + "uuid": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "inputModel": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "timestamp": "2020-01-30 17:26:44", + "p": "0.01", + "q": "0.01" + ] + LinkedHashMap wecResult = [ + "uuid": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "inputModel": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "timestamp": "2020-01-30 17:26:44", + "p": "0.01", + "q": "0.01" + ] when: - csvFileSink.persistAll([pvResult, wecResult]) + csvFileSink.persist(pvResultFileDefinition, pvResult) + csvFileSink.persist(wecResultFileDefinition, wecResult) csvFileSink.dataConnector.shutdown() then: @@ -93,27 +112,45 @@ class 
CsvFileSinkTest extends Specification { !new File(testBaseFolderPath + File.separator + "ev_res.csv").exists() } + def "A valid CsvFileSink throws a SinkException, if the data does not fit the header definition"() { + given: + CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [pvResultFileDefinition], ",", false) + + LinkedHashMap pvResult = [ + "uuid": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "lilaLauneBaer": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "timestamp": "2020-01-30 17:26:44", + "p": "0.01", + "q": "0.01" + ] + + when: + csvFileSink.persist(pvResultFileDefinition, pvResult) + + then: + SinkException exception = thrown() + csvFileSink.dataConnector.shutdown() + + exception.message == "The provided data does not match the head line definition!" + } + def "A valid CsvFileSink should throw an exception if the provided entity cannot be handled"() { given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, - new ProcessorProvider([ - new ResultEntityProcessor(PvResult) - ]), - new FileNamingStrategy(), - false, - ",") - - UUID uuid = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") - UUID inputModel = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") - Quantity p = Quantities.getQuantity(10, StandardUnits.ACTIVE_POWER_IN) - Quantity q = Quantities.getQuantity(10, StandardUnits.REACTIVE_POWER_IN) - WecResult wecResult = new WecResult(uuid, TimeTools.toZonedDateTime("2020-01-30 17:26:44"), inputModel, p, q) + CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [pvResultFileDefinition], ",", false) + LinkedHashMap wecResult = [ + "uuid": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "inputModel": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "timestamp": "2020-01-30 17:26:44", + "p": "0.01", + "q": "0.01" + ] when: - csvFileSink.persist(wecResult) + csvFileSink.persist(wecResultFileDefinition, wecResult) csvFileSink.dataConnector.shutdown() then: - thrown(SinkException) + SinkException exception = thrown(SinkException) + 
exception.getMessage() == "Cannot find a matching writer for file definition: \"CsvFileDefinition{fileName='wec_res', fileExtension='csv', headLineElements=[uuid, inputModel, p, q, timestamp]}\"." } } From 8e519aa7de3c811e196feb3ab5519d57671f7c70 Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Tue, 31 Mar 2020 18:07:32 +0200 Subject: [PATCH 06/13] Adapting time series constructor to maintain uuid information of single time steps --- .../ie3/datamodel/models/timeseries/TimeSeries.java | 4 ++-- .../timeseries/IndividualTimeSeriesSpec.groovy | 12 +++++------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeries.java b/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeries.java index 8e13f1900..7f24918a5 100644 --- a/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeries.java +++ b/src/main/java/edu/ie3/datamodel/models/timeseries/TimeSeries.java @@ -62,11 +62,11 @@ public Optional> getTimeBasedValue(ZonedDateTime time) { * @return the most recent available value before or at the given time step as a TimeBasedValue */ public Optional> getPreviousTimeBasedValue(ZonedDateTime time) { - return getPreviousDateTime(time).map(this::getPreviousTimeBasedValue).map(Optional::get); + return getPreviousDateTime(time).map(this::getTimeBasedValue).map(Optional::get); } /** @return the next available value after or at the given time step as a TimeBasedValue */ public Optional> getNextTimeBasedValue(ZonedDateTime time) { - return getNextDateTime(time).map(this::getPreviousTimeBasedValue).map(Optional::get); + return getNextDateTime(time).map(this::getTimeBasedValue).map(Optional::get); } } diff --git a/src/test/groovy/edu/ie3/datamodel/models/timeseries/IndividualTimeSeriesSpec.groovy b/src/test/groovy/edu/ie3/datamodel/models/timeseries/IndividualTimeSeriesSpec.groovy index f4b997eb4..1ab66eae8 100644 --- a/src/test/groovy/edu/ie3/datamodel/models/timeseries/IndividualTimeSeriesSpec.groovy +++ 
b/src/test/groovy/edu/ie3/datamodel/models/timeseries/IndividualTimeSeriesSpec.groovy @@ -15,13 +15,11 @@ import java.time.ZonedDateTime class IndividualTimeSeriesSpec extends Specification { @Shared IndividualTimeSeries timeSeries = new IndividualTimeSeries<>( - UUID.randomUUID(), - new HashMap() { { - put(ZonedDateTime.of(1990, 1, 1, 0, 0, 0, 0, ZoneId.of("UTC")), new IntValue(3)) - put(ZonedDateTime.of(1990, 1, 1, 0, 15, 0, 0, ZoneId.of("UTC")), new IntValue(4)) - put(ZonedDateTime.of(1990, 1, 1, 0, 30, 0, 0, ZoneId.of("UTC")), new IntValue(1)) - } - }) + UUID.randomUUID(), [ + new TimeBasedValue<>(ZonedDateTime.of(1990, 1, 1, 0, 0, 0, 0, ZoneId.of("UTC")), new IntValue(3)), + new TimeBasedValue<>(ZonedDateTime.of(1990, 1, 1, 0, 15, 0, 0, ZoneId.of("UTC")), new IntValue(4)), + new TimeBasedValue<>(ZonedDateTime.of(1990, 1, 1, 0, 30, 0, 0, ZoneId.of("UTC")), new IntValue(1)) + ]) def "Return empty optional value when queried for non existent time" () { expect: From 1a12e152806a2a6090516a18c9439759ea4a4b8d Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Tue, 31 Mar 2020 19:00:36 +0200 Subject: [PATCH 07/13] Allow for later registration of data destinations in CsvFileSink --- .../io/connectors/CsvFileConnector.java | 15 +++++++-- .../ie3/datamodel/io/sink/CsvFileSink.java | 9 +++-- .../datamodel/io/sink/CsvFileSinkTest.groovy | 33 +++++++++++++++---- 3 files changed, 46 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java index 3b0eb30e1..31b907eb7 100644 --- a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java +++ b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java @@ -26,6 +26,8 @@ public class CsvFileConnector implements DataConnector { private static final Logger log = LogManager.getLogger(CsvFileConnector.class); + private final boolean allowLaterRegistration; + private final Map> fileToWriter; 
private final String csvSeparator; private final String baseFolderName; @@ -41,17 +43,20 @@ public class CsvFileConnector implements DataConnector { * @param initFiles true, if the files should be created during initialization (might create * files, that only consist of a headline, because no data will be written into them), false * otherwise + * @param allowLaterRegistration Allow for registering destination later, when acquiring writers * @throws ConnectorException If the connector cannot be established */ public CsvFileConnector( String baseFolderName, Collection fileDefinitions, String csvSeparator, - boolean initFiles) + boolean initFiles, + boolean allowLaterRegistration) throws ConnectorException { this.baseFolderName = baseFolderName; this.csvSeparator = csvSeparator; this.fileToWriter = mapFileDefinitionsToWriter(fileDefinitions, initFiles); + this.allowLaterRegistration = allowLaterRegistration; } /** @@ -105,8 +110,12 @@ public void shutdown() { */ public BufferedWriter getWriter(CsvFileDefinition fileDefinition) throws ConnectorException { if (!fileToWriter.containsKey(fileDefinition)) - throw new ConnectorException( - "There is no file writer associated with this definition: " + fileDefinition); + if (!allowLaterRegistration) { + throw new ConnectorException( + "There is no file writer associated with this definition: " + fileDefinition); + } else { + fileToWriter.put(fileDefinition, Optional.empty()); + } Optional writerOption = fileToWriter.get(fileDefinition); if (writerOption.isPresent()) return writerOption.get(); diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index c361d68e4..f5e636eba 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -38,15 +38,20 @@ public class CsvFileSink implements DataSink fileDefinitions, String csvSep, - boolean initFiles) { + boolean initFiles, + boolean 
allowLaterRegistration) { this.csvSep = csvSep; try { - this.connector = new CsvFileConnector(baseFolderPath, fileDefinitions, csvSep, initFiles); + this.connector = + new CsvFileConnector( + baseFolderPath, fileDefinitions, csvSep, initFiles, allowLaterRegistration); } catch (ConnectorException e) { throw new SinkException("Error during initialization of the file sink.", e); } diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index 7ab046ef9..2adb7491e 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -53,7 +53,7 @@ class CsvFileSinkTest extends Specification { CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [ pvResultFileDefinition, evResultFileDefinition - ], ",", true) + ], ",", true, false) csvFileSink.dataConnector.shutdown() expect: @@ -67,7 +67,7 @@ class CsvFileSinkTest extends Specification { CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [ pvResultFileDefinition, evResultFileDefinition - ], ",", false) + ], ",", false, false) csvFileSink.dataConnector.shutdown() expect: @@ -82,7 +82,7 @@ class CsvFileSinkTest extends Specification { pvResultFileDefinition, evResultFileDefinition, wecResultFileDefinition - ], ",", false) + ], ",", false, false) LinkedHashMap pvResult = [ "uuid": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", @@ -114,7 +114,7 @@ class CsvFileSinkTest extends Specification { def "A valid CsvFileSink throws a SinkException, if the data does not fit the header definition"() { given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [pvResultFileDefinition], ",", false) + CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [pvResultFileDefinition], ",", false, false) LinkedHashMap pvResult = [ "uuid": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", @@ -134,9 +134,9 @@ class CsvFileSinkTest extends Specification { 
exception.message == "The provided data does not match the head line definition!" } - def "A valid CsvFileSink should throw an exception if the provided entity cannot be handled"() { + def "A valid CsvFileSink should throw an exception if the provided destination is not registered and later registration is prohibited"() { given: - CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [pvResultFileDefinition], ",", false) + CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [pvResultFileDefinition], ",", false, false) LinkedHashMap wecResult = [ "uuid": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", "inputModel": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", @@ -153,4 +153,25 @@ class CsvFileSinkTest extends Specification { SinkException exception = thrown(SinkException) exception.getMessage() == "Cannot find a matching writer for file definition: \"CsvFileDefinition{fileName='wec_res', fileExtension='csv', headLineElements=[uuid, inputModel, p, q, timestamp]}\"." } + + def "A valid CsvFileSink registers a new destination if the provided destination is not registered and later registration is allowed"() { + given: + CsvFileSink csvFileSink = new CsvFileSink(testBaseFolderPath, [pvResultFileDefinition], ",", false, true) + LinkedHashMap wecResult = [ + "uuid": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "inputModel": "22bea5fc-2cb2-4c61-beb9-b476e0107f52", + "timestamp": "2020-01-30 17:26:44", + "p": "0.01", + "q": "0.01" + ] + + when: + csvFileSink.persist(wecResultFileDefinition, wecResult) + csvFileSink.dataConnector.shutdown() + + then: + new File(testBaseFolderPath).exists() + new File(testBaseFolderPath + File.separator + "wec_res.csv").exists() + !new File(testBaseFolderPath + File.separator + "pv_res.csv").exists() // as it is not initialized + } } From 1b95ea541af517319dd6126ae2c26b539bb5d09f Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Tue, 31 Mar 2020 20:25:43 +0200 Subject: [PATCH 08/13] Adapting the definition of valid file names to allow for 
uuids as part of the file name --- src/main/java/edu/ie3/datamodel/io/FileDefinition.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/FileDefinition.java b/src/main/java/edu/ie3/datamodel/io/FileDefinition.java index bf5983481..9aea815d9 100644 --- a/src/main/java/edu/ie3/datamodel/io/FileDefinition.java +++ b/src/main/java/edu/ie3/datamodel/io/FileDefinition.java @@ -16,7 +16,7 @@ * from */ public class FileDefinition implements Destination { - private static final Pattern fileNamePattern = Pattern.compile("[\\w\\\\/]+"); + private static final Pattern fileNamePattern = Pattern.compile("[\\w\\\\/-]+"); private static final Pattern extensionPattern = Pattern.compile("\\.?([\\w\\.]+)$"); private static final Pattern fullPathPattern = Pattern.compile("(" + fileNamePattern.pattern() + ")\\.+(\\w+)"); @@ -40,8 +40,8 @@ public FileDefinition(String fileName, String fileExtension, String[] headLineEl if (fileExtension.matches(fullPathPattern.pattern())) { Matcher matcher = extensionPattern.matcher(fileExtension); matcher.matches(); - this.fileExtension = matcher.group(2).replaceAll("\\.", ""); - } else if (fileName.matches(extensionPattern.pattern())) { + this.fileExtension = matcher.group(1).replaceAll("\\.", ""); + } else if (fileExtension.matches(extensionPattern.pattern())) { Matcher matcher = extensionPattern.matcher(fileExtension); matcher.matches(); this.fileExtension = matcher.group(0).replaceAll("\\.", ""); From ebbb5273bf36528783ea11881f907fe98053997f Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Tue, 31 Mar 2020 20:26:16 +0200 Subject: [PATCH 09/13] Enable to set UUIDs explicitly to time based values --- .../edu/ie3/datamodel/models/value/TimeBasedValue.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/models/value/TimeBasedValue.java b/src/main/java/edu/ie3/datamodel/models/value/TimeBasedValue.java index 6478246be..6bc8afec6 
100644 --- a/src/main/java/edu/ie3/datamodel/models/value/TimeBasedValue.java +++ b/src/main/java/edu/ie3/datamodel/models/value/TimeBasedValue.java @@ -8,6 +8,7 @@ import edu.ie3.datamodel.models.UniqueEntity; import java.time.ZonedDateTime; import java.util.Objects; +import java.util.UUID; /** * Describes an entity of a time series by mapping a value to a timestamp @@ -21,9 +22,14 @@ public class TimeBasedValue extends UniqueEntity private ZonedDateTime time; - public TimeBasedValue(ZonedDateTime time, T value) { - this.value = value; + public TimeBasedValue(UUID uuid, ZonedDateTime time, T value) { + super(uuid); this.time = time; + this.value = value; + } + + public TimeBasedValue(ZonedDateTime time, T value) { + this(UUID.randomUUID(), time, value); } public T getValue() { From d792625bba10e56d9c4a2fb05d360c7687e6271b Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Tue, 31 Mar 2020 20:27:06 +0200 Subject: [PATCH 10/13] Deserialize time series --- .../exceptions/DeserializationException.java | 8 ++ .../ie3/datamodel/io/FileNamingStrategy.java | 25 ++++ .../IndividualTimeSeriesDeserializer.java | 91 ++++++++++++++ .../LoadProfileInputDeserializer.java | 56 +++++++++ .../deserialize/TimeSeriesDeserializer.java | 105 ++++++---------- .../TimeSeriesDeserializerTest.groovy | 118 ++++++++++++++++++ .../input/TimeBasedValueProcessorTest.groovy | 19 +-- 7 files changed, 340 insertions(+), 82 deletions(-) create mode 100644 src/main/java/edu/ie3/datamodel/io/deserialize/IndividualTimeSeriesDeserializer.java create mode 100644 src/main/java/edu/ie3/datamodel/io/deserialize/LoadProfileInputDeserializer.java create mode 100644 src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy diff --git a/src/main/java/edu/ie3/datamodel/exceptions/DeserializationException.java b/src/main/java/edu/ie3/datamodel/exceptions/DeserializationException.java index 59e616e7a..53a8a7cc6 100644 --- 
a/src/main/java/edu/ie3/datamodel/exceptions/DeserializationException.java +++ b/src/main/java/edu/ie3/datamodel/exceptions/DeserializationException.java @@ -6,6 +6,14 @@ package edu.ie3.datamodel.exceptions; public class DeserializationException extends Exception { + public DeserializationException(String message) { + super(message); + } + + public DeserializationException(String message, Throwable cause) { + super(message, cause); + } + public DeserializationException(String message, Object deserializationObject) { super(message + "\nAffected object to be deseralized: " + deserializationObject.toString()); } diff --git a/src/main/java/edu/ie3/datamodel/io/FileNamingStrategy.java b/src/main/java/edu/ie3/datamodel/io/FileNamingStrategy.java index 4cc8463c3..12037475c 100644 --- a/src/main/java/edu/ie3/datamodel/io/FileNamingStrategy.java +++ b/src/main/java/edu/ie3/datamodel/io/FileNamingStrategy.java @@ -8,11 +8,13 @@ import edu.ie3.datamodel.models.UniqueEntity; import edu.ie3.datamodel.models.input.AssetInput; import edu.ie3.datamodel.models.input.AssetTypeInput; +import edu.ie3.datamodel.models.input.LoadProfileInput; import edu.ie3.datamodel.models.input.RandomLoadParameters; import edu.ie3.datamodel.models.input.graphics.GraphicInput; import edu.ie3.datamodel.models.input.system.characteristic.AssetCharacteristicInput; import edu.ie3.datamodel.models.result.ResultEntity; import java.util.Optional; +import java.util.UUID; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -31,6 +33,7 @@ public class FileNamingStrategy { private static final String INPUT_ENTITY_SUFFIX = "_input"; private static final String TYPE_INPUT = "_type_input"; private static final String GRAPHIC_INPUT_SUFFIX = "_graphic"; + private static final String TIME_SERIES_SUFFIX = "_timeseries"; private static final String INPUT_CLASS_STRING = "Input"; @@ -83,6 +86,28 @@ public Optional getFileName(Class cls) { return Optional.empty(); } + public Optional 
getIndividualTimeSeriesFileName(UUID uuid) { + return Optional.of( + prefix + .concat("individual") + .concat(TIME_SERIES_SUFFIX) + .concat("_") + .concat(uuid.toString()) + .concat(suffix)); + } + + public Optional getLoadProfileInputFileName(UUID uuid) { + String identifier = + LoadProfileInput.class.getSimpleName().replace(INPUT_CLASS_STRING, "").toLowerCase(); + return Optional.of( + prefix + .concat(identifier) + .concat(TIME_SERIES_SUFFIX) + .concat("_") + .concat(uuid.toString()) + .concat(suffix)); + } + /** * Get the the file name for all {@link GraphicInput}s * diff --git a/src/main/java/edu/ie3/datamodel/io/deserialize/IndividualTimeSeriesDeserializer.java b/src/main/java/edu/ie3/datamodel/io/deserialize/IndividualTimeSeriesDeserializer.java new file mode 100644 index 000000000..4c4dfad76 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/deserialize/IndividualTimeSeriesDeserializer.java @@ -0,0 +1,91 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.deserialize; + +import edu.ie3.datamodel.exceptions.DeserializationException; +import edu.ie3.datamodel.io.CsvFileDefinition; +import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.io.processor.input.TimeBasedValueProcessor; +import edu.ie3.datamodel.models.timeseries.IndividualTimeSeries; +import edu.ie3.datamodel.models.value.TimeBasedValue; +import edu.ie3.datamodel.models.value.Value; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class IndividualTimeSeriesDeserializer + extends TimeSeriesDeserializer, V> { + private final TimeBasedValueProcessor timeBasedValueProcessor = new TimeBasedValueProcessor(); + private final String[] headLineElements; + + public IndividualTimeSeriesDeserializer(Class valueClass, String baseFolderPath) { + super(valueClass, baseFolderPath); + 
this.headLineElements = determineHeadLineElements(); + } + + @Override + protected CsvFileDefinition determineFileDefinition(UUID uuid) throws DeserializationException { + FileNamingStrategy fileNamingStrategy = new FileNamingStrategy(); + String fileName = + fileNamingStrategy + .getIndividualTimeSeriesFileName(uuid) + .orElseThrow( + () -> + new DeserializationException( + "Cannot determine file name for individual time series with uuid=" + uuid)); + return new CsvFileDefinition(fileName, headLineElements); + } + + @Override + protected String[] determineHeadLineElements() { + return Stream.of( + timeBasedValueProcessor.getHeaderElements(), valueProcessor.getHeaderElements()) + .flatMap(Arrays::stream) + .toArray(String[]::new); + } + + @Override + protected void deserialize(IndividualTimeSeries timeSeries) throws DeserializationException { + /* Get all entries */ + SortedSet> entries = timeSeries.getAllEntries(); + + Set> result = + Collections.unmodifiableSet( + entries.stream().map(this::handleTimeBasedValue).collect(Collectors.toSet())); + + /* Prepare and do writing */ + CsvFileDefinition destination = determineFileDefinition(timeSeries.getUuid()); + csvFileSink.persistAll(destination, result); + } + + /** + * Disassemble the time based value to container and actual value and process it in the equivalent + * processors + * + * @param timeBasedValue The time based value to handle + * @return A mapping from field name to value as String representation + */ + protected LinkedHashMap handleTimeBasedValue(TimeBasedValue timeBasedValue) { + /* Build the mapping from field name to value for the containing class */ + Optional> outerResult = + timeBasedValueProcessor.handleEntity(timeBasedValue); + if (!outerResult.isPresent()) { + logger.error("Cannot deserialize a time based value \"{}\".", timeBasedValue); + return new LinkedHashMap<>(); + } + + Optional> innerResult = + valueProcessor.handleEntity(timeBasedValue.getValue()); + if (!innerResult.isPresent()) { + 
logger.error("Cannot deserialize a time value \"{}\".", timeBasedValue.getValue()); + return new LinkedHashMap<>(); + } + + LinkedHashMap interMediateResult = outerResult.get(); + interMediateResult.putAll(innerResult.get()); + return interMediateResult; + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/deserialize/LoadProfileInputDeserializer.java b/src/main/java/edu/ie3/datamodel/io/deserialize/LoadProfileInputDeserializer.java new file mode 100644 index 000000000..7b5464b7f --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/deserialize/LoadProfileInputDeserializer.java @@ -0,0 +1,56 @@ +/* + * © 2020. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.deserialize; + +import edu.ie3.datamodel.exceptions.DeserializationException; +import edu.ie3.datamodel.io.CsvFileDefinition; +import edu.ie3.datamodel.io.FileNamingStrategy; +import edu.ie3.datamodel.models.input.LoadProfileInput; +import edu.ie3.datamodel.models.value.PValue; +import java.util.UUID; + +public class LoadProfileInputDeserializer extends TimeSeriesDeserializer { + private final String[] headLineElements; + + public LoadProfileInputDeserializer(Class valueClass, String baseFolderPath) { + super(valueClass, baseFolderPath); + this.headLineElements = determineHeadLineElements(); + } + + @Override + protected CsvFileDefinition determineFileDefinition(UUID uuid) throws DeserializationException { + FileNamingStrategy fileNamingStrategy = new FileNamingStrategy(); + String fileName = + fileNamingStrategy + .getLoadProfileInputFileName(uuid) + .orElseThrow( + () -> + new DeserializationException( + "Cannot determine file name for load profile time series with uuid=" + + uuid)); + return new CsvFileDefinition(fileName, headLineElements); + } + + @Override + protected String[] determineHeadLineElements() { + return new String[0]; + } + + @Override + protected 
void deserialize(LoadProfileInput timeSeries) throws DeserializationException { + throw new DeserializationException( + "The deserialisation of LoadProleInput is not implemented, yet.", timeSeries); + + /* + * Steps to implement + * 1) Determine the "unique" table entries as a combination of "credentials" + * and edu.ie3.datamodel.models.value.Value + * 2) Build field name to value mapping for credentials and values independently + * 3) Combine the mapping + * 4) Write the result + */ + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializer.java b/src/main/java/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializer.java index 264985b2e..354fbf637 100644 --- a/src/main/java/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializer.java +++ b/src/main/java/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializer.java @@ -6,83 +6,50 @@ package edu.ie3.datamodel.io.deserialize; import edu.ie3.datamodel.exceptions.DeserializationException; -import edu.ie3.datamodel.io.processor.input.TimeBasedValueProcessor; +import edu.ie3.datamodel.io.CsvFileDefinition; import edu.ie3.datamodel.io.processor.input.ValueProcessor; -import edu.ie3.datamodel.models.input.LoadProfileInput; -import edu.ie3.datamodel.models.timeseries.IndividualTimeSeries; +import edu.ie3.datamodel.io.sink.CsvFileSink; import edu.ie3.datamodel.models.timeseries.TimeSeries; -import edu.ie3.datamodel.models.value.TimeBasedValue; import edu.ie3.datamodel.models.value.Value; -import java.util.*; -import java.util.stream.Collectors; +import java.util.Collections; +import java.util.UUID; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class TimeSeriesDeserializer { - private static final Logger logger = LoggerFactory.getLogger(TimeSeriesDeserializer.class); +public abstract class TimeSeriesDeserializer, V extends Value> { + protected static final Logger logger = LoggerFactory.getLogger(TimeSeriesDeserializer.class); - public void deserialize(TimeSeries timeSeries) - 
throws DeserializationException { - /* Distinguish between individual and repetitive time series */ - if (timeSeries instanceof IndividualTimeSeries) { - IndividualTimeSeries individualTimeSeries = (IndividualTimeSeries) timeSeries; + protected final ValueProcessor valueProcessor; - /* Get all entries */ - TimeBasedValueProcessor timeBasedValueProcessor = new TimeBasedValueProcessor(); - SortedSet> entries = individualTimeSeries.getAllEntries(); - Set> result = - Collections.unmodifiableSet( - entries.stream() - .map( - timeBasedValue -> { - /* Build the mapping from field name to value for the containing class */ - Optional> outerResult = - timeBasedValueProcessor.handleEntity(timeBasedValue); - if (!outerResult.isPresent()) { - logger.error( - "Cannot deserialize a time based value \"{}\".", timeBasedValue); - return new HashMap(); - } + private static final String CSV_SEP = ","; + protected final CsvFileSink csvFileSink; - ValueProcessor valueProcessor = - new ValueProcessor( - (Class) timeBasedValue.getValue().getClass()); - Optional> innerResult = - valueProcessor.handleEntity(timeBasedValue.getValue()); - if (!innerResult.isPresent()) { - logger.error( - "Cannot deserialize a time value \"{}\".", timeBasedValue.getValue()); - return new HashMap(); - } - - LinkedHashMap interMediateResult = outerResult.get(); - interMediateResult.putAll(innerResult.get()); - return Collections.unmodifiableMap(interMediateResult); - }) - .collect(Collectors.toSet())); - - // TODO: Writing the result - } else { - /* As repetitive time series as only abstract, determine the concrete type */ - if (timeSeries instanceof LoadProfileInput) { - LoadProfileInput loadProfile = (LoadProfileInput) timeSeries; - throw new DeserializationException( - "The deserialisation of LoadProleInput is not implemented, yet.", loadProfile); - - /* - * Steps to implement - * 1) Determine the "unique" table entries as a combination of "credentials" - * and edu.ie3.datamodel.models.value.Value - * 2) 
Build field name to value mapping for credentials and values independently - * 3) Combine the mapping - * 4) Write the result - */ - } else { - throw new DeserializationException( - "There is no deserialization routine defined for a time series of type " - + timeSeries.getClass().getSimpleName(), - timeSeries); - } - } + public TimeSeriesDeserializer(Class valueClass, String baseFolderPath) { + this.valueProcessor = new ValueProcessor<>(valueClass); + /* We cannot determine the file definitions on instantiation, as every unique time series gets it's own unique file name */ + this.csvFileSink = + new CsvFileSink(baseFolderPath, Collections.emptySet(), CSV_SEP, false, true); } + + /** + * Builds a file definition for a unique time series. + * + * @return A file definition + */ + protected abstract CsvFileDefinition determineFileDefinition(UUID uuid) + throws DeserializationException; + + /** + * Determine the head line elements / the field names of the model to persist + * + * @return An array of Strings denoting the field names + */ + protected abstract String[] determineHeadLineElements(); + + /** + * Deserializes the given time series + * + * @param timeSeries to deserialize + */ + protected abstract void deserialize(T timeSeries) throws DeserializationException; } diff --git a/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy new file mode 100644 index 000000000..25db53551 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy @@ -0,0 +1,118 @@ +/* + * © 2020. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.deserialize + +import edu.ie3.datamodel.io.CsvFileDefinition +import edu.ie3.datamodel.models.timeseries.IndividualTimeSeries +import edu.ie3.datamodel.models.value.EnergyPriceValue +import edu.ie3.datamodel.models.value.TimeBasedValue +import edu.ie3.util.TimeTools +import edu.ie3.util.io.FileIOUtils +import spock.lang.Shared +import spock.lang.Specification +import tec.uom.se.quantity.Quantities + +import java.time.ZoneId + +import static edu.ie3.util.quantities.PowerSystemUnits.EURO_PER_MEGAWATTHOUR + +class TimeSeriesDeserializerTest extends Specification { + static { + TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") + } + + @Shared + IndividualTimeSeries individualTimeSeries + + @Shared + String[] headLineElements + + @Shared + String testBaseFolderPath + + def setupSpec() { + individualTimeSeries = new IndividualTimeSeries<>( + UUID.fromString("178892cf-500f-4e62-9d1f-ff9e3a92215e"), + [ + new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:00:00") ,new EnergyPriceValue(Quantities.getQuantity(1d, EURO_PER_MEGAWATTHOUR))), + new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:15:00") ,new EnergyPriceValue(Quantities.getQuantity(2d, EURO_PER_MEGAWATTHOUR))), + new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:30:00") ,new EnergyPriceValue(Quantities.getQuantity(3d, EURO_PER_MEGAWATTHOUR))), + new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:45:00") ,new EnergyPriceValue(Quantities.getQuantity(4d, EURO_PER_MEGAWATTHOUR))) + ] + ) + + headLineElements = ["uuid", "time", "price"] + testBaseFolderPath = "test" + } + + def cleanup() { + // delete files after each test if they exist + if (new File(testBaseFolderPath).exists()) { + FileIOUtils.deleteRecursively(testBaseFolderPath) + } + } + + def "The 
IndividualTimeSeriesDeserializer determines the headline elements correctly"() { + given: + IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) + String[] expected = headLineElements + + when: + String[] actual = timeSeriesDeserializer.determineHeadLineElements() + + then: + actual == expected + } + + def "The IndividualTimeSeriesDeserializer determines the correct CsvFileDefinition"() { + given: + IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) + CsvFileDefinition expected = new CsvFileDefinition("individual_timeseries_178892cf-500f-4e62-9d1f-ff9e3a92215e", headLineElements) + + when: + CsvFileDefinition actual = timeSeriesDeserializer.determineFileDefinition(UUID.fromString("178892cf-500f-4e62-9d1f-ff9e3a92215e")) + + then: + actual == expected + } + + def "The IndividualTimeSeriesDeserializer handles a single time based value correctly"() { + given: + IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) + TimeBasedValue dut = new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:00:00") ,new EnergyPriceValue(Quantities.getQuantity(1d, EURO_PER_MEGAWATTHOUR))) + Map expected = [ + "uuid": "Egal - Michael Wendler", + "time": "2020-03-31 19:00:00", + "price": "1.0" + ] + + when: + LinkedHashMap actual = timeSeriesDeserializer.handleTimeBasedValue(dut) + + then: + /* The uuid is randomly generated here and therefore not checked */ + actual.size() == expected.size() + expected.forEach { k, v -> + if(k == "uuid") + assert actual.containsKey(k) + else + assert (v == actual.get(k)) + } + } + + def "The IndividualTimeSeriesDeserializer creates the correct file on deserialization"() { + given: + IndividualTimeSeriesDeserializer timeSeriesDeserializer = new 
IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) + + when: + timeSeriesDeserializer.deserialize(individualTimeSeries) + timeSeriesDeserializer.csvFileSink.dataConnector.shutdown() + + then: + new File(testBaseFolderPath).exists() + new File(testBaseFolderPath + File.separator + "individual_timeseries_178892cf-500f-4e62-9d1f-ff9e3a92215e.csv").exists() + } +} diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy index 2b932e0b3..620be925d 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy @@ -27,29 +27,22 @@ class TimeBasedValueProcessorTest extends Specification { TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") } - def "The TimeBasedValueProcessor should de-serialize a provided time based EnergyPriceValue correctly"() { + def "The TimeBasedValueProcessor should de-serialize a provided TimeBasedValue correctly"() { given: - TimeBasedValueProcessor processor = new TimeBasedValueProcessor<>(EnergyPriceValue.class) + TimeBasedValueProcessor processor = new TimeBasedValueProcessor() EnergyPriceValue value = new EnergyPriceValue(Quantities.getQuantity(43.21, EURO_PER_MEGAWATTHOUR)) ZonedDateTime time = TimeTools.toZonedDateTime("2020-03-27 15:29:14") - TimeBasedValue timeBasedValue = new TimeBasedValue<>(time, value) + TimeBasedValue timeBasedValue = new TimeBasedValue<>(UUID.fromString("e6b3483c-936f-4168-9917-dc3b2e8bdf2c"), time, value) Map expected = [ - "uuid" : "has random uuid", + "uuid" : "e6b3483c-936f-4168-9917-dc3b2e8bdf2c", "time" : "2020-03-27 15:29:14", - "price" : "43.21" ] when: Optional> actual = processor.handleEntity(timeBasedValue) then: - actual.isPresent() - LinkedHashMap result = actual.get() - expected.forEach { 
k, v -> - if(k == "uuid") - assert result.containsKey(k) - else - assert (v == result.get(k)) - } + actual.present + actual.get() == expected } } From 7565edfc5b8e068e848bf25a7ef848c8a09ec18b Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Wed, 1 Apr 2020 08:49:38 +0200 Subject: [PATCH 11/13] Addressing Codacy issues --- .../ie3/datamodel/io/processor/Processor.java | 33 ++++++++++--------- .../TimeSeriesDeserializerTest.groovy | 25 +++++++------- .../input/InputEntityProcessorTest.groovy | 10 +++--- .../input/TimeBasedValueProcessorTest.groovy | 4 +-- .../datamodel/io/sink/CsvFileSinkTest.groovy | 8 ++--- 5 files changed, 41 insertions(+), 39 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java index c912d79d8..6966a441d 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java @@ -34,28 +34,17 @@ * @param Type parameter of the class to handle */ public abstract class Processor { - /** - * Comparator to sort a Map of field name to getter method, so that the first entry is the uuid - * and the rest is sorted alphabetically. - */ - private static class UuidFirstComparator implements Comparator { - @Override - public int compare(String a, String b) { - if (a.equalsIgnoreCase(UniqueEntity.UUID_FIELD_NAME)) return -1; - else return a.compareTo(b); - } - } - protected static final Logger logger = LoggerFactory.getLogger(Processor.class); + protected final Class registeredClass; /* Quantities associated to those fields must be treated differently (e.g. 
input and result), all other quantity / * field combinations can be treated on a common basis and therefore need no further distinction */ private static final Set specificQuantityFieldNames = - Collections.unmodifiableSet( - new HashSet<>( - Arrays.asList( - "eConsAnnual", "energy", "eStorage", "q", "p", "pMax", "pOwn", "pThermal"))); + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + "eConsAnnual", "energy", "eStorage", "q", "p", "pMax", "pOwn", "pThermal"))); private static final GeoJsonWriter geoJsonWriter = new GeoJsonWriter(); @@ -67,6 +56,18 @@ public int compare(String a, String b) { private static final String VOLT_LVL = NodeInputFactory.VOLT_LVL; private static final String V_RATED = NodeInputFactory.V_RATED; + /** + * Comparator to sort a Map of field name to getter method, so that the first entry is the uuid + * and the rest is sorted alphabetically. + */ + private static class UuidFirstComparator implements Comparator { + @Override + public int compare(String a, String b) { + if (a.equalsIgnoreCase(UniqueEntity.UUID_FIELD_NAME)) return -1; + else return a.compareTo(b); + } + } + /** * Instantiates a Processor for a foreseen class * diff --git a/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy index 25db53551..b39f3b7c2 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy @@ -5,6 +5,8 @@ */ package edu.ie3.datamodel.io.deserialize +import static edu.ie3.util.quantities.PowerSystemUnits.EURO_PER_MEGAWATTHOUR + import edu.ie3.datamodel.io.CsvFileDefinition import edu.ie3.datamodel.models.timeseries.IndividualTimeSeries import edu.ie3.datamodel.models.value.EnergyPriceValue @@ -17,8 +19,6 @@ import tec.uom.se.quantity.Quantities import java.time.ZoneId -import static 
edu.ie3.util.quantities.PowerSystemUnits.EURO_PER_MEGAWATTHOUR - class TimeSeriesDeserializerTest extends Specification { static { TimeTools.initialize(ZoneId.of("UTC"), Locale.GERMANY, "yyyy-MM-dd HH:mm:ss") @@ -37,10 +37,10 @@ class TimeSeriesDeserializerTest extends Specification { individualTimeSeries = new IndividualTimeSeries<>( UUID.fromString("178892cf-500f-4e62-9d1f-ff9e3a92215e"), [ - new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:00:00") ,new EnergyPriceValue(Quantities.getQuantity(1d, EURO_PER_MEGAWATTHOUR))), - new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:15:00") ,new EnergyPriceValue(Quantities.getQuantity(2d, EURO_PER_MEGAWATTHOUR))), - new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:30:00") ,new EnergyPriceValue(Quantities.getQuantity(3d, EURO_PER_MEGAWATTHOUR))), - new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:45:00") ,new EnergyPriceValue(Quantities.getQuantity(4d, EURO_PER_MEGAWATTHOUR))) + new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:00:00"), new EnergyPriceValue(Quantities.getQuantity(1d, EURO_PER_MEGAWATTHOUR))), + new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:15:00"), new EnergyPriceValue(Quantities.getQuantity(2d, EURO_PER_MEGAWATTHOUR))), + new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:30:00"), new EnergyPriceValue(Quantities.getQuantity(3d, EURO_PER_MEGAWATTHOUR))), + new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:45:00"), new EnergyPriceValue(Quantities.getQuantity(4d, EURO_PER_MEGAWATTHOUR))) ] ) @@ -82,7 +82,7 @@ class TimeSeriesDeserializerTest extends Specification { def "The IndividualTimeSeriesDeserializer handles a single time based value correctly"() { given: IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) - TimeBasedValue dut = new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:00:00") ,new 
EnergyPriceValue(Quantities.getQuantity(1d, EURO_PER_MEGAWATTHOUR))) + TimeBasedValue dut = new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:00:00"), new EnergyPriceValue(Quantities.getQuantity(1d, EURO_PER_MEGAWATTHOUR))) Map expected = [ "uuid": "Egal - Michael Wendler", "time": "2020-03-31 19:00:00", @@ -96,16 +96,17 @@ class TimeSeriesDeserializerTest extends Specification { /* The uuid is randomly generated here and therefore not checked */ actual.size() == expected.size() expected.forEach { k, v -> - if(k == "uuid") - assert actual.containsKey(k) - else - assert (v == actual.get(k)) + if (k == "uuid") { + assert actual.containsKey(k) + } else { + assert (v == actual.get(k)) + } } } def "The IndividualTimeSeriesDeserializer creates the correct file on deserialization"() { given: - IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) + IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue, testBaseFolderPath) when: timeSeriesDeserializer.deserialize(individualTimeSeries) diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy index 62710a995..55b0f0aa5 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy @@ -289,7 +289,7 @@ class InputEntityProcessorTest extends Specification { ] } - def "The InputEntityProcessor should de-serialize a provided NodeGraphicInput with point correctly"(){ + def "The InputEntityProcessor should de-serialize a provided NodeGraphicInput with point correctly"() { given: InputEntityProcessor processor = new InputEntityProcessor(NodeGraphicInput.class) NodeGraphicInput validNode = GridTestData.nodeGraphicC @@ -309,7 
+309,7 @@ class InputEntityProcessorTest extends Specification { actual.get() == expected } - def "The InputEntityProcessor should de-serialize a provided NodeGraphicInput with path correctly"(){ + def "The InputEntityProcessor should de-serialize a provided NodeGraphicInput with path correctly"() { given: InputEntityProcessor processor = new InputEntityProcessor(NodeGraphicInput.class) NodeGraphicInput validNode = GridTestData.nodeGraphicD @@ -329,9 +329,9 @@ class InputEntityProcessorTest extends Specification { actual.get() == expected } - def "The InputEntityProcessor should de-serialize a provided LineGraphicInput correctly"(){ + def "The InputEntityProcessor should de-serialize a provided LineGraphicInput correctly"() { given: - InputEntityProcessor processor = new InputEntityProcessor(LineGraphicInput.class) + InputEntityProcessor processor = new InputEntityProcessor(LineGraphicInput) LineGraphicInput validNode = GridTestData.lineGraphicCtoD Map expected = [ "uuid" : "ece86139-3238-4a35-9361-457ecb4258b0", @@ -350,7 +350,7 @@ class InputEntityProcessorTest extends Specification { def "The InputEntityProcessor should de-serialize a provided OperatorInput correctly"() { given: - InputEntityProcessor processor = new InputEntityProcessor(OperatorInput.class) + InputEntityProcessor processor = new InputEntityProcessor(OperatorInput) OperatorInput operator = new OperatorInput(UUID.fromString("420ee39c-dd5a-4d9c-9156-23dbdef13e5e"), "Prof. 
Brokkoli") Map expected = [ "uuid" : "420ee39c-dd5a-4d9c-9156-23dbdef13e5e", diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy index 620be925d..81ae0c928 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/input/TimeBasedValueProcessorTest.groovy @@ -5,13 +5,13 @@ */ package edu.ie3.datamodel.io.processor.input +import static edu.ie3.util.quantities.PowerSystemUnits.* + import edu.ie3.util.TimeTools import java.time.ZoneId import java.time.ZonedDateTime -import static edu.ie3.util.quantities.PowerSystemUnits.* - import edu.ie3.datamodel.models.value.EnergyPriceValue import edu.ie3.datamodel.models.value.TimeBasedValue import spock.lang.Specification diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index 2adb7491e..fdda22426 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -36,9 +36,9 @@ class CsvFileSinkTest extends Specification { ResultEntityProcessor evResultEntityProcessor = new ResultEntityProcessor(EvResult) ResultEntityProcessor wecResultEntityProcessor = new ResultEntityProcessor(WecResult) - pvResultFileDefinition = new CsvFileDefinition(fileNamingStrategy.getFileName(PvResult).get(), pvResultEntityProcessor.getHeaderElements()) - evResultFileDefinition = new CsvFileDefinition(fileNamingStrategy.getFileName(EvResult).get(), evResultEntityProcessor.getHeaderElements()) - wecResultFileDefinition = new CsvFileDefinition(fileNamingStrategy.getFileName(WecResult).get(), wecResultEntityProcessor.getHeaderElements()) + pvResultFileDefinition = new CsvFileDefinition(fileNamingStrategy.getFileName(PvResult).get(), 
pvResultEntityProcessor.headerElements) + evResultFileDefinition = new CsvFileDefinition(fileNamingStrategy.getFileName(EvResult).get(), evResultEntityProcessor.headerElements) + wecResultFileDefinition = new CsvFileDefinition(fileNamingStrategy.getFileName(WecResult).get(), wecResultEntityProcessor.headerElements) } def cleanup() { @@ -151,7 +151,7 @@ class CsvFileSinkTest extends Specification { then: SinkException exception = thrown(SinkException) - exception.getMessage() == "Cannot find a matching writer for file definition: \"CsvFileDefinition{fileName='wec_res', fileExtension='csv', headLineElements=[uuid, inputModel, p, q, timestamp]}\"." + exception.message == "Cannot find a matching writer for file definition: \"CsvFileDefinition{fileName='wec_res', fileExtension='csv', headLineElements=[uuid, inputModel, p, q, timestamp]}\"." } def "A valid CsvFileSink registers a new destination if the provided destination is not registered and later registration is allowed"() { From 48dc58bd9994a9521da915a7c7a44e7c09a96f2e Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Wed, 1 Apr 2020 08:57:33 +0200 Subject: [PATCH 12/13] Formatting --- .../java/edu/ie3/datamodel/io/processor/Processor.java | 8 ++++---- .../io/deserialize/TimeSeriesDeserializerTest.groovy | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java index 6966a441d..8ffe8050d 100644 --- a/src/main/java/edu/ie3/datamodel/io/processor/Processor.java +++ b/src/main/java/edu/ie3/datamodel/io/processor/Processor.java @@ -41,10 +41,10 @@ public abstract class Processor { /* Quantities associated to those fields must be treated differently (e.g. 
input and result), all other quantity / * field combinations can be treated on a common basis and therefore need no further distinction */ private static final Set specificQuantityFieldNames = - Collections.unmodifiableSet( - new HashSet<>( - Arrays.asList( - "eConsAnnual", "energy", "eStorage", "q", "p", "pMax", "pOwn", "pThermal"))); + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + "eConsAnnual", "energy", "eStorage", "q", "p", "pMax", "pOwn", "pThermal"))); private static final GeoJsonWriter geoJsonWriter = new GeoJsonWriter(); diff --git a/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy index b39f3b7c2..962503147 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy @@ -97,10 +97,10 @@ class TimeSeriesDeserializerTest extends Specification { actual.size() == expected.size() expected.forEach { k, v -> if (k == "uuid") { - assert actual.containsKey(k) - } else { - assert (v == actual.get(k)) - } + assert actual.containsKey(k) + } else { + assert (v == actual.get(k)) + } } } From 91cfd222cc4f3704b2ac2a1146d4b071a9e51799 Mon Sep 17 00:00:00 2001 From: "Kittl, Chris" Date: Wed, 1 Apr 2020 09:06:56 +0200 Subject: [PATCH 13/13] Once again making Codacy happy --- .../io/deserialize/TimeSeriesDeserializerTest.groovy | 6 +++--- .../io/processor/input/InputEntityProcessorTest.groovy | 4 ++-- src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy index 962503147..47c1030f9 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy +++ 
b/src/test/groovy/edu/ie3/datamodel/io/deserialize/TimeSeriesDeserializerTest.groovy @@ -57,7 +57,7 @@ class TimeSeriesDeserializerTest extends Specification { def "The IndividualTimeSeriesDeserializer determines the headline elements correctly"() { given: - IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) + IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue, testBaseFolderPath) String[] expected = headLineElements when: @@ -69,7 +69,7 @@ class TimeSeriesDeserializerTest extends Specification { def "The IndividualTimeSeriesDeserializer determines the correct CsvFileDefinition"() { given: - IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) + IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue, testBaseFolderPath) CsvFileDefinition expected = new CsvFileDefinition("individual_timeseries_178892cf-500f-4e62-9d1f-ff9e3a92215e", headLineElements) when: @@ -81,7 +81,7 @@ class TimeSeriesDeserializerTest extends Specification { def "The IndividualTimeSeriesDeserializer handles a single time based value correctly"() { given: - IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue.class, testBaseFolderPath) + IndividualTimeSeriesDeserializer timeSeriesDeserializer = new IndividualTimeSeriesDeserializer<>(EnergyPriceValue, testBaseFolderPath) TimeBasedValue dut = new TimeBasedValue<>(TimeTools.toZonedDateTime("2020-03-31 19:00:00"), new EnergyPriceValue(Quantities.getQuantity(1d, EURO_PER_MEGAWATTHOUR))) Map expected = [ "uuid": "Egal - Michael Wendler", diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy index 55b0f0aa5..cc7f8301c 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy @@ -291,7 +291,7 @@ class InputEntityProcessorTest extends Specification { def "The InputEntityProcessor should de-serialize a provided NodeGraphicInput with point correctly"() { given: - InputEntityProcessor processor = new InputEntityProcessor(NodeGraphicInput.class) + InputEntityProcessor processor = new InputEntityProcessor(NodeGraphicInput) NodeGraphicInput validNode = GridTestData.nodeGraphicC Map expected = [ "uuid" : "09aec636-791b-45aa-b981-b14edf171c4c", @@ -311,7 +311,7 @@ class InputEntityProcessorTest extends Specification { def "The InputEntityProcessor should de-serialize a provided NodeGraphicInput with path correctly"() { given: - InputEntityProcessor processor = new InputEntityProcessor(NodeGraphicInput.class) + InputEntityProcessor processor = new InputEntityProcessor(NodeGraphicInput) NodeGraphicInput validNode = GridTestData.nodeGraphicD Map expected = [ "uuid" : "9ecad435-bd16-4797-a732-762c09d4af25", diff --git a/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy b/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy index ea30b7ac9..b5b67fff7 100644 --- a/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy +++ b/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy @@ -217,7 +217,7 @@ class ComplexTopology extends GridTestData { ) DirectedMultigraph mutableGraph = - new DirectedMultigraph<>(SubGridGate.class) + new DirectedMultigraph<>(SubGridGate) /* Add all edges */ expectedSubGrids.values().forEach({subGrid -> mutableGraph.addVertex(subGrid)})