From 6000d32d03d8be4282d279dda87e475f2e693d01 Mon Sep 17 00:00:00 2001 From: dennis Date: Mon, 8 Jun 2020 20:43:37 +0200 Subject: [PATCH 01/30] Adds Regex to modify JSON Strings to match csv spec. --- .../java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java index b43a7780f..475e16895 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java @@ -21,7 +21,7 @@ public class BufferedCsvWriter extends BufferedWriter { /** Information on the shape of the file */ private final CsvFileDefinition fileDefinition; /** True, if every entry should be quoted */ - private final boolean quoted; + private final boolean quoted = true; /** * Build a new CsvBufferedWriter * @@ -47,7 +47,6 @@ public BufferedCsvWriter( baseFolder + File.separator + fileDefinition.getFilePath(), append), StandardCharsets.UTF_8)); this.fileDefinition = fileDefinition; - this.quoted = quoted; if (writeHeader) writeFileHeader(fileDefinition.headLineElements); } @@ -106,6 +105,11 @@ private void writeFileHeader(String[] headLineElements) throws IOException { private void writeOneLine(String[] entries) throws IOException { for (int i = 0; i < entries.length; i++) { String attribute = entries[i]; + if (attribute.matches("^\"\\{(?:.*)\\}\"$")) { + attribute = attribute.replaceAll("\"", "\"\""); + attribute = attribute.substring(1, attribute.length() - 1); + } + System.out.println(attribute); super.append(attribute); if (i + 1 < entries.length) { super.append(fileDefinition.csvSep); From 3d48c8b3569b179fe8df39797d76ba383d7f3743 Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 9 Jun 2020 12:53:37 +0200 Subject: [PATCH 02/30] -Factory removes double double quotes to match standard format -Adjusts Tests --- 
.../ie3/datamodel/io/factory/EntityData.java | 4 +- .../factory/input/LineInputFactoryTest.groovy | 38 ++++++++++++++++++ .../factory/input/NodeInputFactoryTest.groovy | 39 +++++++++++++++++++ .../ie3/test/helper/FactoryTestHelper.groovy | 1 + 4 files changed, 80 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java index 499ae1a6e..127acd853 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java @@ -80,7 +80,7 @@ public String getField(String field) { if (!fieldsToAttributes.containsKey(field)) throw new FactoryException(String.format("Field \"%s\" not found in EntityData", field)); - return fieldsToAttributes.get(field); + return fieldsToAttributes.get(field).replaceAll("\"\"", "\""); } /** @@ -92,7 +92,7 @@ public String getField(String field) { public Optional getFieldOptional(String field) { if (!fieldsToAttributes.containsKey(field)) return Optional.empty(); - return Optional.of(fieldsToAttributes.get(field)); + return Optional.of(fieldsToAttributes.get(field).replaceAll("\"\"", "\"")); } /** diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy index 1b40edc53..995bd5b1f 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy @@ -174,4 +174,42 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.311111, 51.49228],[7.511111, 51.49228]]}" | _ } + + def "A LineInputFactory should 
parse a valid LineInput with different double double quoted geoPosition strings correctly"() { + given: "a line input factory and model data" + def inputFactory = new LineInputFactory() + Map parameter = [ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "operatesfrom" : "2019-01-01T00:00:00+01:00[Europe/Berlin]", + "operatesuntil" : "", + "id" : "TestID", + "paralleldevices" : "2", + "length" : "3", + "geoposition" : geoLineString, + "olmcharacteristic": "olm:{(0.0,1.0)}" + ] + def inputClass = LineInput + def operatorInput = Mock(OperatorInput) + def nodeInputA = Mock(NodeInput) + nodeInputA.getGeoPosition() >> NodeInput.DEFAULT_GEO_POSITION + def nodeInputB = Mock(NodeInput) + nodeInputB.getGeoPosition() >> NodeInput.DEFAULT_GEO_POSITION + def typeInput = Mock(LineTypeInput) + + when: + Optional input = inputFactory.getEntity(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) + + then: + input.present + input.get().getClass() == inputClass + ((LineInput) input.get()).with { + assert geoPosition == GridAndGeoUtils.buildSafeLineString(getGeometry(parameter["geoposition"]) as LineString) + } + + where: + geoLineString | _ + "{ \"\"type\"\": \"\"LineString\"\", \"\"coordinates\"\": [[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ + "{ \"\"type\"\": \"\"LineString\"\", \"\"coordinates\"\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ + "{ \"\"type\"\": \"\"LineString\"\", \"\"coordinates\"\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.311111, 51.49228],[7.511111, 51.49228]]}" | _ + } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy index 072ed921c..219393240 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy 
@@ -64,4 +64,43 @@ class NodeInputFactoryTest extends Specification implements FactoryTestHelper { assert subnet == Integer.parseInt(parameter["subnet"]) } } + + def "A NodeInputFactory should parse a valid NodeInput with double double quotes correctly"() { + given: "a system participant input type factory and model data" + def inputFactory = new NodeInputFactory() + Map parameter = [ + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "operatesfrom" : "2019-01-01T00:00:00+01:00[Europe/Berlin]", + "operatesuntil": "", + "id" : "TestID", + "vtarget" : "2", + "vrated" : "3", + "slack" : "true", + "geoposition" : "{ \"\"type\"\": \"\"Point\"\", \"\"coordinates\"\": [7.411111, 51.492528] }", + "voltlvl" : "lv", + "subnet" : "7" + ] + def inputClass = NodeInput + def operatorInput = Mock(OperatorInput) + + when: + Optional input = inputFactory.getEntity(new AssetInputEntityData(parameter, inputClass, operatorInput)) + + then: + input.present + input.get().getClass() == inputClass + ((NodeInput) input.get()).with { + assert uuid == UUID.fromString(parameter["uuid"]) + assert operationTime.startDate.present + assert operationTime.startDate.get() == ZonedDateTime.parse(parameter["operatesfrom"]) + assert !operationTime.endDate.present + assert operator == operatorInput + assert id == parameter["id"] + assert vTarget == getQuant(parameter["vtarget"], StandardUnits.TARGET_VOLTAGE_MAGNITUDE) + assert slack + assert geoPosition == getGeometry(parameter["geoposition"]) + assert voltLvl == GermanVoltageLevelUtils.parse(parameter["voltlvl"], getQuant(parameter["vrated"], StandardUnits.RATED_VOLTAGE_MAGNITUDE) as ComparableQuantity) + assert subnet == Integer.parseInt(parameter["subnet"]) + } + } } diff --git a/src/test/groovy/edu/ie3/test/helper/FactoryTestHelper.groovy b/src/test/groovy/edu/ie3/test/helper/FactoryTestHelper.groovy index 399d82b14..1fa01c733 100644 --- a/src/test/groovy/edu/ie3/test/helper/FactoryTestHelper.groovy +++ 
b/src/test/groovy/edu/ie3/test/helper/FactoryTestHelper.groovy @@ -18,6 +18,7 @@ trait FactoryTestHelper { } static getGeometry(String value) { + value = value.replaceAll("\"\"", "\"") return GEOJSON_READER.read(value) } } From c99a5aaa0156491b8c01e4f878c84b83f9733103 Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 9 Jun 2020 13:50:34 +0200 Subject: [PATCH 03/30] Applies Spotless --- .../datamodel/io/csv/BufferedCsvWriter.java | 5 +++-- .../factory/input/LineInputFactoryTest.groovy | 16 +++++++-------- .../factory/input/NodeInputFactoryTest.groovy | 20 +++++++++---------- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java index 475e16895..7bbd84e60 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java @@ -21,7 +21,7 @@ public class BufferedCsvWriter extends BufferedWriter { /** Information on the shape of the file */ private final CsvFileDefinition fileDefinition; /** True, if every entry should be quoted */ - private final boolean quoted = true; + private final boolean quoted; /** * Build a new CsvBufferedWriter @@ -47,6 +47,7 @@ public BufferedCsvWriter( baseFolder + File.separator + fileDefinition.getFilePath(), append), StandardCharsets.UTF_8)); this.fileDefinition = fileDefinition; + this.quoted = true; if (writeHeader) writeFileHeader(fileDefinition.headLineElements); } @@ -63,7 +64,7 @@ public BufferedCsvWriter( public BufferedCsvWriter( String baseFolder, CsvFileDefinition fileDefinition, boolean writeHeader, boolean append) throws IOException { - this(baseFolder, fileDefinition, false, writeHeader, append); + this(baseFolder, fileDefinition, true, writeHeader, append); } /** diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy 
b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy index 995bd5b1f..39a06d942 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy @@ -179,14 +179,14 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { given: "a line input factory and model data" def inputFactory = new LineInputFactory() Map parameter = [ - "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "operatesfrom" : "2019-01-01T00:00:00+01:00[Europe/Berlin]", - "operatesuntil" : "", - "id" : "TestID", - "paralleldevices" : "2", - "length" : "3", - "geoposition" : geoLineString, - "olmcharacteristic": "olm:{(0.0,1.0)}" + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "operatesfrom" : "2019-01-01T00:00:00+01:00[Europe/Berlin]", + "operatesuntil" : "", + "id" : "TestID", + "paralleldevices" : "2", + "length" : "3", + "geoposition" : geoLineString, + "olmcharacteristic": "olm:{(0.0,1.0)}" ] def inputClass = LineInput def operatorInput = Mock(OperatorInput) diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy index 219393240..499a6e093 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy @@ -69,16 +69,16 @@ class NodeInputFactoryTest extends Specification implements FactoryTestHelper { given: "a system participant input type factory and model data" def inputFactory = new NodeInputFactory() Map parameter = [ - "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "operatesfrom" : "2019-01-01T00:00:00+01:00[Europe/Berlin]", - "operatesuntil": "", - "id" : "TestID", - "vtarget" : "2", - "vrated" : "3", - "slack" : "true", - "geoposition" : "{ \"\"type\"\": \"\"Point\"\", \"\"coordinates\"\": 
[7.411111, 51.492528] }", - "voltlvl" : "lv", - "subnet" : "7" + "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", + "operatesfrom" : "2019-01-01T00:00:00+01:00[Europe/Berlin]", + "operatesuntil": "", + "id" : "TestID", + "vtarget" : "2", + "vrated" : "3", + "slack" : "true", + "geoposition" : "{ \"\"type\"\": \"\"Point\"\", \"\"coordinates\"\": [7.411111, 51.492528] }", + "voltlvl" : "lv", + "subnet" : "7" ] def inputClass = NodeInput def operatorInput = Mock(OperatorInput) From b99df924e49e564d163846b19c8e2c7fdfb4bc8d Mon Sep 17 00:00:00 2001 From: dennis Date: Mon, 15 Jun 2020 19:03:56 +0200 Subject: [PATCH 04/30] Reworks quoting process --- .../io/connectors/CsvFileConnector.java | 4 +- .../datamodel/io/csv/BufferedCsvWriter.java | 34 +--------------- .../ie3/datamodel/io/factory/EntityData.java | 2 +- .../ie3/datamodel/io/sink/CsvFileSink.java | 34 +++++++++++++--- .../factory/input/LineInputFactoryTest.groovy | 38 ------------------ .../factory/input/NodeInputFactoryTest.groovy | 39 ------------------- .../ie3/test/helper/FactoryTestHelper.groovy | 1 - 7 files changed, 34 insertions(+), 118 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java index 15322a26b..f380d2d1f 100644 --- a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java +++ b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java @@ -107,13 +107,13 @@ private BufferedCsvWriter initWriter(String baseFolder, CsvFileDefinition fileDe File pathFile = new File(fullPathToFile); if (!pathFile.exists()) { - return new BufferedCsvWriter(baseFolder, fileDefinition, false, true, false); + return new BufferedCsvWriter(baseFolder, fileDefinition, true, false); } log.warn( "File '{}.csv' already exist. Will append new content WITHOUT new header! 
Full path: {}", fileDefinition.getFileName(), pathFile.getAbsolutePath()); - return new BufferedCsvWriter(baseFolder, fileDefinition, false, false, true); + return new BufferedCsvWriter(baseFolder, fileDefinition, false, true); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java index 7bbd84e60..41686e53a 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java @@ -20,26 +20,18 @@ public class BufferedCsvWriter extends BufferedWriter { /** Information on the shape of the file */ private final CsvFileDefinition fileDefinition; - /** True, if every entry should be quoted */ - private final boolean quoted; - /** * Build a new CsvBufferedWriter * * @param baseFolder Base folder, from where the file hierarchy should start * @param fileDefinition The foreseen shape of the file * @param writeHeader Toggles, if the head line is written or not - * @param quoted True, if the entries may be quoted * @param append true to append to an existing file, false to overwrite an existing file (if any), * if no file exists, a new one will be created in both cases * @throws IOException If the FileOutputStream cannot be established. */ public BufferedCsvWriter( - String baseFolder, - CsvFileDefinition fileDefinition, - boolean quoted, - boolean writeHeader, - boolean append) + String baseFolder, CsvFileDefinition fileDefinition, boolean writeHeader, boolean append) throws IOException { super( new OutputStreamWriter( @@ -47,26 +39,9 @@ public BufferedCsvWriter( baseFolder + File.separator + fileDefinition.getFilePath(), append), StandardCharsets.UTF_8)); this.fileDefinition = fileDefinition; - this.quoted = true; if (writeHeader) writeFileHeader(fileDefinition.headLineElements); } - /** - * Build a new CsvBufferedWriter. 
All entries are quoted - * - * @param baseFolder Base folder, from where the file hierarchy should start - * @param fileDefinition The foreseen shape of the file - * @param writeHeader Toggles, if the head line is written or not - * @param append true to append to an existing file, false to overwrite an existing file (if any), - * if no file exists, a new one will be created in both cases - * @throws IOException If the FileOutputStream cannot be established. - */ - public BufferedCsvWriter( - String baseFolder, CsvFileDefinition fileDefinition, boolean writeHeader, boolean append) - throws IOException { - this(baseFolder, fileDefinition, true, writeHeader, append); - } - /** * Actually persisting the provided entity field data * @@ -85,7 +60,7 @@ public void write(Map entityFieldData) throws IOException, SinkE + "'."); String[] entries = entityFieldData.values().toArray(new String[0]); - writeOneLine(quoted ? StringUtils.quote(entries) : entries); + writeOneLine(entries); } /** @@ -106,11 +81,6 @@ private void writeFileHeader(String[] headLineElements) throws IOException { private void writeOneLine(String[] entries) throws IOException { for (int i = 0; i < entries.length; i++) { String attribute = entries[i]; - if (attribute.matches("^\"\\{(?:.*)\\}\"$")) { - attribute = attribute.replaceAll("\"", "\"\""); - attribute = attribute.substring(1, attribute.length() - 1); - } - System.out.println(attribute); super.append(attribute); if (i + 1 < entries.length) { super.append(fileDefinition.csvSep); diff --git a/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java index 127acd853..6dfa901ca 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java @@ -80,7 +80,7 @@ public String getField(String field) { if (!fieldsToAttributes.containsKey(field)) throw new FactoryException(String.format("Field \"%s\" not found in EntityData", 
field)); - return fieldsToAttributes.get(field).replaceAll("\"\"", "\""); + return fieldsToAttributes.get(field); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index bd5d75fe9..6f560140d 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -147,7 +147,7 @@ public void persist(T entity) { @Override public void persistIgnoreNested(C entity) { - LinkedHashMap entityFieldData = new LinkedHashMap<>(); + LinkedHashMap entityFieldData; try { entityFieldData = processorProvider @@ -166,7 +166,8 @@ public void persistIgnoreNested(C entity) { String[] headerElements = processorProvider.getHeaderElements(entity.getClass()); BufferedCsvWriter writer = connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); - writer.write(entityFieldData); + LinkedHashMap quotedEntityFieldData = quoteCSVStrings(entityFieldData); + writer.write(quotedEntityFieldData); } catch (ProcessorProviderException e) { log.error( "Exception occurred during receiving of header elements. 
Cannot write this element.", e); @@ -182,6 +183,31 @@ public void persistIgnoreNested(C entity) { } } + /** + * Quotes all fields that contain special characters to comply with the CSV specification RFC 4180 + * (https://tools.ietf.org/html/rfc4180) The " contained in the JSON strings are escaped with the + * same character to make the CSV data readable later + * + * @param entityFieldData LinkedHashMap containing all entityData + * @return LinkedHashMap containing all entityData with the relevant data quoted + */ + private LinkedHashMap quoteCSVStrings( + LinkedHashMap entityFieldData) { + for (Map.Entry entry : entityFieldData.entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + if (value.matches("(?:.*)\\{(?:.*)}")) { + entityFieldData.put( + key, + value + .replaceAll("\"", "\"\"") + .replaceAll("^([^\"])", "\"$1") + .replaceAll("([^\"])$", "$1\"")); + } + } + return entityFieldData; + } + @Override public void persistAllIgnoreNested(Collection entities) { entities.parallelStream().forEach(this::persistIgnoreNested); @@ -229,9 +255,7 @@ public void persistJointGrid(JointGridContainer jointGridContainer) { wecPlants) .flatMap(Collection::stream) .map( - entityWithType -> - Extractor.extractType( - entityWithType)) // due to a bug in java 8 this *cannot* be replaced with + Extractor::extractType) // due to a bug in java 8 this *cannot* be replaced with // method reference! 
.collect(Collectors.toSet()); diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy index 39a06d942..1b40edc53 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/LineInputFactoryTest.groovy @@ -174,42 +174,4 @@ class LineInputFactoryTest extends Specification implements FactoryTestHelper { "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ "{ \"type\": \"LineString\", \"coordinates\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.311111, 51.49228],[7.511111, 51.49228]]}" | _ } - - def "A LineInputFactory should parse a valid LineInput with different double double quoted geoPosition strings correctly"() { - given: "a line input factory and model data" - def inputFactory = new LineInputFactory() - Map parameter = [ - "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "operatesfrom" : "2019-01-01T00:00:00+01:00[Europe/Berlin]", - "operatesuntil" : "", - "id" : "TestID", - "paralleldevices" : "2", - "length" : "3", - "geoposition" : geoLineString, - "olmcharacteristic": "olm:{(0.0,1.0)}" - ] - def inputClass = LineInput - def operatorInput = Mock(OperatorInput) - def nodeInputA = Mock(NodeInput) - nodeInputA.getGeoPosition() >> NodeInput.DEFAULT_GEO_POSITION - def nodeInputB = Mock(NodeInput) - nodeInputB.getGeoPosition() >> NodeInput.DEFAULT_GEO_POSITION - def typeInput = Mock(LineTypeInput) - - when: - Optional input = inputFactory.getEntity(new TypedConnectorInputEntityData(parameter, inputClass, operatorInput, nodeInputA, nodeInputB, typeInput)) - - then: - input.present - input.get().getClass() == inputClass - ((LineInput) input.get()).with { - assert geoPosition == GridAndGeoUtils.buildSafeLineString(getGeometry(parameter["geoposition"]) as LineString) - } - - 
where: - geoLineString | _ - "{ \"\"type\"\": \"\"LineString\"\", \"\"coordinates\"\": [[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ - "{ \"\"type\"\": \"\"LineString\"\", \"\"coordinates\"\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228],[7.411111, 51.49228]]}" | _ - "{ \"\"type\"\": \"\"LineString\"\", \"\"coordinates\"\": [[7.411111, 51.49228],[7.411111, 51.49228],[7.311111, 51.49228],[7.511111, 51.49228]]}" | _ - } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy index 499a6e093..072ed921c 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/factory/input/NodeInputFactoryTest.groovy @@ -64,43 +64,4 @@ class NodeInputFactoryTest extends Specification implements FactoryTestHelper { assert subnet == Integer.parseInt(parameter["subnet"]) } } - - def "A NodeInputFactory should parse a valid NodeInput with double double quotes correctly"() { - given: "a system participant input type factory and model data" - def inputFactory = new NodeInputFactory() - Map parameter = [ - "uuid" : "91ec3bcf-1777-4d38-af67-0bf7c9fa73c7", - "operatesfrom" : "2019-01-01T00:00:00+01:00[Europe/Berlin]", - "operatesuntil": "", - "id" : "TestID", - "vtarget" : "2", - "vrated" : "3", - "slack" : "true", - "geoposition" : "{ \"\"type\"\": \"\"Point\"\", \"\"coordinates\"\": [7.411111, 51.492528] }", - "voltlvl" : "lv", - "subnet" : "7" - ] - def inputClass = NodeInput - def operatorInput = Mock(OperatorInput) - - when: - Optional input = inputFactory.getEntity(new AssetInputEntityData(parameter, inputClass, operatorInput)) - - then: - input.present - input.get().getClass() == inputClass - ((NodeInput) input.get()).with { - assert uuid == UUID.fromString(parameter["uuid"]) - assert operationTime.startDate.present - assert operationTime.startDate.get() == 
ZonedDateTime.parse(parameter["operatesfrom"]) - assert !operationTime.endDate.present - assert operator == operatorInput - assert id == parameter["id"] - assert vTarget == getQuant(parameter["vtarget"], StandardUnits.TARGET_VOLTAGE_MAGNITUDE) - assert slack - assert geoPosition == getGeometry(parameter["geoposition"]) - assert voltLvl == GermanVoltageLevelUtils.parse(parameter["voltlvl"], getQuant(parameter["vrated"], StandardUnits.RATED_VOLTAGE_MAGNITUDE) as ComparableQuantity) - assert subnet == Integer.parseInt(parameter["subnet"]) - } - } } diff --git a/src/test/groovy/edu/ie3/test/helper/FactoryTestHelper.groovy b/src/test/groovy/edu/ie3/test/helper/FactoryTestHelper.groovy index 1fa01c733..399d82b14 100644 --- a/src/test/groovy/edu/ie3/test/helper/FactoryTestHelper.groovy +++ b/src/test/groovy/edu/ie3/test/helper/FactoryTestHelper.groovy @@ -18,7 +18,6 @@ trait FactoryTestHelper { } static getGeometry(String value) { - value = value.replaceAll("\"\"", "\"") return GEOJSON_READER.read(value) } } From 14529dba961b40ac5c493a4b8faf6f19882cef78 Mon Sep 17 00:00:00 2001 From: dennis Date: Mon, 15 Jun 2020 19:09:02 +0200 Subject: [PATCH 05/30] Spotless Apply... --- src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 6f560140d..07ba612d9 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -254,8 +254,7 @@ public void persistJointGrid(JointGridContainer jointGridContainer) { storages, wecPlants) .flatMap(Collection::stream) - .map( - Extractor::extractType) // due to a bug in java 8 this *cannot* be replaced with + .map(Extractor::extractType) // due to a bug in java 8 this *cannot* be replaced with // method reference! 
.collect(Collectors.toSet()); From cfbfdf157b1086c9a967b365c16ff526efb2028e Mon Sep 17 00:00:00 2001 From: dennis Date: Mon, 15 Jun 2020 19:47:52 +0200 Subject: [PATCH 06/30] Undoes method reference --- src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 07ba612d9..fa0a1ce4f 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -254,7 +254,10 @@ public void persistJointGrid(JointGridContainer jointGridContainer) { storages, wecPlants) .flatMap(Collection::stream) - .map(Extractor::extractType) // due to a bug in java 8 this *cannot* be replaced with + .map( + entityWithType -> + Extractor.extractType( + entityWithType)) // due to a bug in java 8 this *cannot* be replaced with // method reference! .collect(Collectors.toSet()); From 6357203406f049581fdfef5fe5fcc5dace5d7b56 Mon Sep 17 00:00:00 2001 From: dennis Date: Mon, 15 Jun 2020 20:22:45 +0200 Subject: [PATCH 07/30] Resets EntityData class --- src/main/java/edu/ie3/datamodel/io/factory/EntityData.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java b/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java index 6dfa901ca..499ae1a6e 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/EntityData.java @@ -92,7 +92,7 @@ public String getField(String field) { public Optional getFieldOptional(String field) { if (!fieldsToAttributes.containsKey(field)) return Optional.empty(); - return Optional.of(fieldsToAttributes.get(field).replaceAll("\"\"", "\"")); + return Optional.of(fieldsToAttributes.get(field)); } /** From e1a6ff0f537c90af26210a3940a198e5b86fa5a2 Mon Sep 17 00:00:00 2001 From: dennis Date: 
Mon, 15 Jun 2020 20:50:53 +0200 Subject: [PATCH 08/30] Adds all csv spec cases --- .../java/edu/ie3/datamodel/io/sink/CsvFileSink.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index fa0a1ce4f..d13773358 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -166,7 +166,8 @@ public void persistIgnoreNested(C entity) { String[] headerElements = processorProvider.getHeaderElements(entity.getClass()); BufferedCsvWriter writer = connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); - LinkedHashMap quotedEntityFieldData = quoteCSVStrings(entityFieldData); + LinkedHashMap quotedEntityFieldData = + quoteCSVStrings(entityFieldData, csvSep); writer.write(quotedEntityFieldData); } catch (ProcessorProviderException e) { log.error( @@ -192,11 +193,15 @@ public void persistIgnoreNested(C entity) { * @return LinkedHashMap containing all entityData with the relevant data quoted */ private LinkedHashMap quoteCSVStrings( - LinkedHashMap entityFieldData) { + LinkedHashMap entityFieldData, String csvSep) { for (Map.Entry entry : entityFieldData.entrySet()) { String key = entry.getKey(); String value = entry.getValue(); - if (value.matches("(?:.*)\\{(?:.*)}")) { + if (value.matches("(?:.*)\\{(?:.*)}") + || value.contains(csvSep) + || value.contains(",") + || value.contains("\"") + || value.contains("\n")) { entityFieldData.put( key, value From 913fbe56d7ebafb5c66bad1d182615df694a444b Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 16 Jun 2020 12:53:48 +0200 Subject: [PATCH 09/30] -Removes header Quotes -Adds solution to quote header elements if necessary --- .../datamodel/io/csv/BufferedCsvWriter.java | 2 +- .../ie3/datamodel/io/sink/CsvFileSink.java | 30 ++++++++++++------- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git 
a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java index 41686e53a..a9d1d94c6 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java @@ -69,7 +69,7 @@ public void write(Map entityFieldData) throws IOException, SinkE * @throws IOException If something is messed up */ private void writeFileHeader(String[] headLineElements) throws IOException { - writeOneLine(StringUtils.quote(StringUtils.camelCaseToSnakeCase(headLineElements))); + writeOneLine(StringUtils.camelCaseToSnakeCase(headLineElements)); } /** diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index d13773358..06307d5fa 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -194,23 +194,33 @@ public void persistIgnoreNested(C entity) { */ private LinkedHashMap quoteCSVStrings( LinkedHashMap entityFieldData, String csvSep) { + LinkedHashMap quotedEntityFieldData = new LinkedHashMap<>(); for (Map.Entry entry : entityFieldData.entrySet()) { String key = entry.getKey(); String value = entry.getValue(); +/* if (key.matches("(?:.*)\\{(?:.*)}") + || key.contains(csvSep) + || key.contains("uuid") + || key.contains("\"") + || key.contains("\n")) { + key = key.replaceAll("\"", "\"\"") + .replaceAll("^([^\"])", "\"$1") + .replaceAll("([^\"])$", "$1\""); + }*/ if (value.matches("(?:.*)\\{(?:.*)}") - || value.contains(csvSep) - || value.contains(",") - || value.contains("\"") - || value.contains("\n")) { - entityFieldData.put( - key, - value - .replaceAll("\"", "\"\"") + || value.contains(csvSep) + || value.contains(",") + || value.contains("\"") + || value.contains("\n")) { + value = value.replaceAll("\"", "\"\"") .replaceAll("^([^\"])", "\"$1") - .replaceAll("([^\"])$", "$1\"")); + 
.replaceAll("([^\"])$", "$1\""); } + quotedEntityFieldData.put( + key, + value); } - return entityFieldData; + return quotedEntityFieldData; } @Override From 6b9f910b8ce30abb05f560832c90a45515cc944d Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 16 Jun 2020 13:40:21 +0200 Subject: [PATCH 10/30] -Implements quoteHeaderElements method to predefine header for CsvFileDefinition --- .../ie3/datamodel/io/sink/CsvFileSink.java | 34 ++++++++++++++++--- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 06307d5fa..7c017dadd 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -163,7 +163,7 @@ public void persistIgnoreNested(C entity) { .collect(Collectors.joining(",")) + "]")); - String[] headerElements = processorProvider.getHeaderElements(entity.getClass()); + String[] headerElements = quoteHeaderElements(processorProvider.getHeaderElements(entity.getClass()), csvSep); BufferedCsvWriter writer = connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); LinkedHashMap quotedEntityFieldData = @@ -184,29 +184,55 @@ public void persistIgnoreNested(C entity) { } } + /** + * Quotes header elements to predefine a valid CsvFileDefinition + * + * @param headerElements Array of csv header elements + * @param csvSep Csv separator to check if it appears within the header element + * @return Quoted header elements + */ + + private String[] quoteHeaderElements( + String[] headerElements, String csvSep) { + for (int index = 0; index <= headerElements.length - 1; index ++) { + if (headerElements[index].matches("(?:.*)\\{(?:.*)}") + || headerElements[index].contains(csvSep) + || headerElements[index].contains(",") + || headerElements[index].contains("\"") + || headerElements[index].contains("\n")) { + headerElements[index] = headerElements[index].replaceAll("\"", 
"\"\"") + .replaceAll("^([^\"])", "\"$1") + .replaceAll("([^\"])$", "$1\""); + } + } + return headerElements; + } + /** * Quotes all fields that contain special characters to comply with the CSV specification RFC 4180 * (https://tools.ietf.org/html/rfc4180) The " contained in the JSON strings are escaped with the * same character to make the CSV data readable later * * @param entityFieldData LinkedHashMap containing all entityData + * @param csvSep Csv separator to check if it appears within the data * @return LinkedHashMap containing all entityData with the relevant data quoted */ + private LinkedHashMap quoteCSVStrings( LinkedHashMap entityFieldData, String csvSep) { LinkedHashMap quotedEntityFieldData = new LinkedHashMap<>(); for (Map.Entry entry : entityFieldData.entrySet()) { String key = entry.getKey(); String value = entry.getValue(); -/* if (key.matches("(?:.*)\\{(?:.*)}") + if (key.matches("(?:.*)\\{(?:.*)}") || key.contains(csvSep) - || key.contains("uuid") + || key.contains(",") || key.contains("\"") || key.contains("\n")) { key = key.replaceAll("\"", "\"\"") .replaceAll("^([^\"])", "\"$1") .replaceAll("([^\"])$", "$1\""); - }*/ + } if (value.matches("(?:.*)\\{(?:.*)}") || value.contains(csvSep) || value.contains(",") From 7cda0a93db19be589eb9f31e23caefcb1ed928af Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 16 Jun 2020 13:43:35 +0200 Subject: [PATCH 11/30] sA... 
--- .../ie3/datamodel/io/sink/CsvFileSink.java | 49 ++++++++++--------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 7c017dadd..4eae97d5e 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -163,7 +163,8 @@ public void persistIgnoreNested(C entity) { .collect(Collectors.joining(",")) + "]")); - String[] headerElements = quoteHeaderElements(processorProvider.getHeaderElements(entity.getClass()), csvSep); + String[] headerElements = + quoteHeaderElements(processorProvider.getHeaderElements(entity.getClass()), csvSep); BufferedCsvWriter writer = connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); LinkedHashMap quotedEntityFieldData = @@ -191,16 +192,16 @@ public void persistIgnoreNested(C entity) { * @param csvSep Csv separator to check if it appears within the header element * @return Quoted header elements */ - - private String[] quoteHeaderElements( - String[] headerElements, String csvSep) { - for (int index = 0; index <= headerElements.length - 1; index ++) { + private String[] quoteHeaderElements(String[] headerElements, String csvSep) { + for (int index = 0; index <= headerElements.length - 1; index++) { if (headerElements[index].matches("(?:.*)\\{(?:.*)}") - || headerElements[index].contains(csvSep) - || headerElements[index].contains(",") - || headerElements[index].contains("\"") - || headerElements[index].contains("\n")) { - headerElements[index] = headerElements[index].replaceAll("\"", "\"\"") + || headerElements[index].contains(csvSep) + || headerElements[index].contains(",") + || headerElements[index].contains("\"") + || headerElements[index].contains("\n")) { + headerElements[index] = + headerElements[index] + .replaceAll("\"", "\"\"") .replaceAll("^([^\"])", "\"$1") .replaceAll("([^\"])$", "$1\""); } @@ -217,7 
+218,6 @@ private String[] quoteHeaderElements( * @param csvSep Csv separator to check if it appears within the data * @return LinkedHashMap containing all entityData with the relevant data quoted */ - private LinkedHashMap quoteCSVStrings( LinkedHashMap entityFieldData, String csvSep) { LinkedHashMap quotedEntityFieldData = new LinkedHashMap<>(); @@ -225,26 +225,27 @@ private LinkedHashMap quoteCSVStrings( String key = entry.getKey(); String value = entry.getValue(); if (key.matches("(?:.*)\\{(?:.*)}") - || key.contains(csvSep) - || key.contains(",") - || key.contains("\"") - || key.contains("\n")) { - key = key.replaceAll("\"", "\"\"") + || key.contains(csvSep) + || key.contains(",") + || key.contains("\"") + || key.contains("\n")) { + key = + key.replaceAll("\"", "\"\"") .replaceAll("^([^\"])", "\"$1") .replaceAll("([^\"])$", "$1\""); } if (value.matches("(?:.*)\\{(?:.*)}") - || value.contains(csvSep) - || value.contains(",") - || value.contains("\"") - || value.contains("\n")) { - value = value.replaceAll("\"", "\"\"") + || value.contains(csvSep) + || value.contains(",") + || value.contains("\"") + || value.contains("\n")) { + value = + value + .replaceAll("\"", "\"\"") .replaceAll("^([^\"])", "\"$1") .replaceAll("([^\"])$", "$1\""); } - quotedEntityFieldData.put( - key, - value); + quotedEntityFieldData.put(key, value); } return quotedEntityFieldData; } From b13de384e71835228b92b1b19f7f76dd62dcb9bc Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 16 Jun 2020 16:49:28 +0200 Subject: [PATCH 12/30] -Removes json string regex --- .../java/edu/ie3/datamodel/io/sink/CsvFileSink.java | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 4eae97d5e..7fe59c731 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -194,8 +194,7 @@ public void 
persistIgnoreNested(C entity) { */ private String[] quoteHeaderElements(String[] headerElements, String csvSep) { for (int index = 0; index <= headerElements.length - 1; index++) { - if (headerElements[index].matches("(?:.*)\\{(?:.*)}") - || headerElements[index].contains(csvSep) + if (headerElements[index].contains(csvSep) || headerElements[index].contains(",") || headerElements[index].contains("\"") || headerElements[index].contains("\n")) { @@ -224,18 +223,13 @@ private LinkedHashMap quoteCSVStrings( for (Map.Entry entry : entityFieldData.entrySet()) { String key = entry.getKey(); String value = entry.getValue(); - if (key.matches("(?:.*)\\{(?:.*)}") - || key.contains(csvSep) - || key.contains(",") - || key.contains("\"") - || key.contains("\n")) { + if (key.contains(csvSep) || key.contains(",") || key.contains("\"") || key.contains("\n")) { key = key.replaceAll("\"", "\"\"") .replaceAll("^([^\"])", "\"$1") .replaceAll("([^\"])$", "$1\""); } - if (value.matches("(?:.*)\\{(?:.*)}") - || value.contains(csvSep) + if (value.contains(csvSep) || value.contains(",") || value.contains("\"") || value.contains("\n")) { From cd80e279583686bcd80a102c3349c54f97bdef7f Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 16 Jun 2020 19:21:05 +0200 Subject: [PATCH 13/30] -Turns double double quotes into double quotes when reading csv data --- .../datamodel/io/source/csv/CsvDataSource.java | 12 ++++++++++-- .../io/source/csv/CsvDataSourceTest.groovy | 17 ++++++++++++++++- 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java index b69ce05f9..55270bea9 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java @@ -78,8 +78,10 @@ private Map buildFieldsToAttributes( TreeMap insensitiveFieldsToAttributes = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + final 
String[] fieldVals = fieldVals(csvSep, csvRow); + try { insensitiveFieldsToAttributes.putAll( IntStream.range(0, fieldVals.length) @@ -131,14 +133,20 @@ private String[] fieldVals(String csvSep, String csvRow) { final String charInputRegex = "(cP:|olm:|cosPhiFixed:|cosPhiP:|qV:)\\{.+?\\}"; final String charReplacement = "charRepl"; - List geoList = extractMatchingStrings(geoJsonRegex, csvRow); - List charList = extractMatchingStrings(charInputRegex, csvRow); + /*removes double double quotes*/ + List geoList = extractMatchingStrings(geoJsonRegex, csvRow.replaceAll("\"\"", "\"")); + List charList = extractMatchingStrings(charInputRegex, csvRow.replaceAll("\"\"", "\"")); AtomicInteger geoCounter = new AtomicInteger(0); AtomicInteger charCounter = new AtomicInteger(0); + System.out.println(geoList); + + return Arrays.stream( csvRow + /*removes double double quotes not covered by json or geo strings*/ + .replaceAll("\"\"", "\"") .replaceAll(charInputRegex, charReplacement) .replaceAll(geoJsonRegex, geoReplacement) .replaceAll("\"", "") diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy index b9eba30e4..7878b158e 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy @@ -173,6 +173,21 @@ class CsvDataSourceTest extends Specification { "{\"type\":\"LineString\",\"coordinates\":[[7.4116482,51.4843281],[7.4116482,51.4843281]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "{\"type\":\"Point\",\"coordinates\":[0.25423729,0.75409836],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:0\"}}}" ] + "," | 
"4ca90220-74c2-4369-9afa-a18bf068840d,{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528],\"\"crs\"\":{\"\"type\"\":\"\"name\"\",\"\"properties\"\":{\"\"name\"\":\"\"EPSG:4326\"\"}}},node_a,2020-03-25T15:11:31Z[UTC],2020-03-24T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,true,1,1.0,Höchstspannung,380.0,\"olm:{(0.00,1.00)}\",\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\"" || [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC]", + "2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + ] } @@ -191,7 +206,7 @@ class CsvDataSourceTest extends Specification { "s_rated", "olmcharacteristic", "cosPhiFixed"] as String[] - def validCsvRow = "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput,50.0,25.0,olm:{(0.0,1.0)}," + def validCsvRow = "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput,50.0,25.0,\"olm:{(0.0,1.0)}\"," expect: dummyCsvSource.buildFieldsToAttributes(validCsvRow, validHeadline) == [ From 4ca3164ed5279909efb19c80a4006bbc3711ba2e Mon Sep 17 00:00:00 2001 From: dennis Date: Tue, 16 Jun 2020 20:44:19 +0200 Subject: [PATCH 14/30] -Implements csv quoting from utils --- .../ie3/datamodel/io/sink/CsvFileSink.java | 119 ++++++++++-------- .../io/source/csv/CsvDataSource.java | 5 - .../datamodel/io/sink/CsvFileSinkTest.groovy | 109 ++++++++++++++++ .../io/source/csv/CsvDataSourceTest.groovy | 26 ++-- 4 files changed, 188 insertions(+), 71 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 7fe59c731..e0568a310 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ 
b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -163,12 +163,28 @@ public void persistIgnoreNested(C entity) { .collect(Collectors.joining(",")) + "]")); - String[] headerElements = - quoteHeaderElements(processorProvider.getHeaderElements(entity.getClass()), csvSep); + String[] headerElements = processorProvider.getHeaderElements(entity.getClass()); + String[] headerElementsQuoted = + Arrays.stream(headerElements) + .map(inputElement -> csvString(inputElement, ",")) + .toArray(String[]::new); + BufferedCsvWriter writer = - connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); + connector.getOrInitWriter(entity.getClass(), headerElementsQuoted, csvSep); LinkedHashMap quotedEntityFieldData = - quoteCSVStrings(entityFieldData, csvSep); + entityFieldData.entrySet().stream() + .map( + mapEntry -> + new AbstractMap.SimpleEntry<>( + csvString(mapEntry.getKey(), ","), csvString(mapEntry.getValue(), ","))) + .collect( + Collectors.toMap( + AbstractMap.SimpleEntry::getKey, + AbstractMap.SimpleEntry::getValue, + (v1, v2) -> { + throw new IllegalStateException(); + }, + LinkedHashMap::new)); writer.write(quotedEntityFieldData); } catch (ProcessorProviderException e) { log.error( @@ -186,62 +202,59 @@ public void persistIgnoreNested(C entity) { } /** - * Quotes header elements to predefine a valid CsvFileDefinition + * Adds quotation marks at the beginning and end of the input, if they are not apparent, yet. 
* - * @param headerElements Array of csv header elements - * @param csvSep Csv separator to check if it appears within the header element - * @return Quoted header elements + * @param input String to quote + * @return Quoted String */ - private String[] quoteHeaderElements(String[] headerElements, String csvSep) { - for (int index = 0; index <= headerElements.length - 1; index++) { - if (headerElements[index].contains(csvSep) - || headerElements[index].contains(",") - || headerElements[index].contains("\"") - || headerElements[index].contains("\n")) { - headerElements[index] = - headerElements[index] - .replaceAll("\"", "\"\"") - .replaceAll("^([^\"])", "\"$1") - .replaceAll("([^\"])$", "$1\""); - } - } - return headerElements; + public static String quote(String input) { + return quoteEnd(quoteStart(input)); + } + + private static String quoteStart(String input) { + return input.replaceAll("^([^\"])", "\"$1"); + } + + private static String quoteEnd(String input) { + return input.replaceAll("([^\"])$", "$1\""); } /** - * Quotes all fields that contain special characters to comply with the CSV specification RFC 4180 - * (https://tools.ietf.org/html/rfc4180) The " contained in the JSON strings are escaped with the - * same character to make the CSV data readable later + * Quotes a given string that contains special characters to comply with the csv specification RFC + * 4180 (https://tools.ietf.org/html/rfc4180). Double quotes in JSON strings are escaped with the + * same character to make the csv data readable later. 
* - * @param entityFieldData LinkedHashMap containing all entityData - * @param csvSep Csv separator to check if it appears within the data - * @return LinkedHashMap containing all entityData with the relevant data quoted + * @param inputString string that should be converted to a valid rfc 4180 string + * @param csvSep separator of the csv file + * @return a csv string that is valid according to rfc 4180 */ - private LinkedHashMap quoteCSVStrings( - LinkedHashMap entityFieldData, String csvSep) { - LinkedHashMap quotedEntityFieldData = new LinkedHashMap<>(); - for (Map.Entry entry : entityFieldData.entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - if (key.contains(csvSep) || key.contains(",") || key.contains("\"") || key.contains("\n")) { - key = - key.replaceAll("\"", "\"\"") - .replaceAll("^([^\"])", "\"$1") - .replaceAll("([^\"])$", "$1\""); - } - if (value.contains(csvSep) - || value.contains(",") - || value.contains("\"") - || value.contains("\n")) { - value = - value - .replaceAll("\"", "\"\"") - .replaceAll("^([^\"])", "\"$1") - .replaceAll("([^\"])$", "$1\""); - } - quotedEntityFieldData.put(key, value); - } - return quotedEntityFieldData; + public static String csvString(String inputString, String csvSep) { + if (needsCsvRFC4180Quote(inputString, csvSep)) { + /* clean the string by first quoting start and end of the string and then replace all double quotes + * that are followed by one or more double quotes with single double quotes */ + String quotedStartEndString = quote(inputString).replaceAll("\"\"*", "\""); + /* get everything in between the start and end quotes and replace single quotes with double quotes */ + String stringWOStartEndQuotes = + quotedStartEndString + .substring(1, quotedStartEndString.length() - 1) + .replaceAll("\"", "\"\""); + /* finally add quotes to the strings start and end again */ + return quote(stringWOStartEndQuotes); + } else return inputString; + } + + /** + * Check if the provided string 
needs to be quoted according to the csv specification RFC 4180 + * + * @param inputString the string that should be checked + * @param csvSep separator of the csv file + * @return true of the string needs to be quoted, false otherwise + */ + private static boolean needsCsvRFC4180Quote(String inputString, String csvSep) { + return inputString.contains(csvSep) + || inputString.contains(",") + || inputString.contains("\"") + || inputString.contains("\n"); } @Override diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java index 55270bea9..c4c6ea65a 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java @@ -78,10 +78,8 @@ private Map buildFieldsToAttributes( TreeMap insensitiveFieldsToAttributes = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); - final String[] fieldVals = fieldVals(csvSep, csvRow); - try { insensitiveFieldsToAttributes.putAll( IntStream.range(0, fieldVals.length) @@ -140,9 +138,6 @@ private String[] fieldVals(String csvSep, String csvRow) { AtomicInteger geoCounter = new AtomicInteger(0); AtomicInteger charCounter = new AtomicInteger(0); - System.out.println(geoList); - - return Arrays.stream( csvRow /*removes double double quotes not covered by json or geo strings*/ diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index a4326c741..546e41ff6 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -32,6 +32,7 @@ import edu.ie3.datamodel.models.value.EnergyPriceValue import edu.ie3.test.common.GridTestData import edu.ie3.test.common.TimeSeriesTestData import edu.ie3.test.common.ThermalUnitInputTestData +import edu.ie3.util.StringUtils import edu.ie3.util.TimeUtil import 
edu.ie3.util.io.FileIOUtils import spock.lang.Shared @@ -40,6 +41,7 @@ import tec.uom.se.quantity.Quantities import javax.measure.Quantity import javax.measure.quantity.Power +import java.util.stream.Collectors class CsvFileSinkTest extends Specification implements TimeSeriesTestData { @@ -186,4 +188,111 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { new File(testBaseFolderPath + File.separator + "individual_time_series_a4bbcb77-b9d0-4b88-92be-b9a14a3e332b.csv").exists() new File(testBaseFolderPath + File.separator + "load_profile_time_series_g2_b56853fe-b800-4c18-b324-db1878b22a28.csv").exists() } + + def "The StringUtils converts a given Array of csv header elements to match the csv specification RFC 4180 "() { + given: + def input = [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC] \n 2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1,0", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + ] + def expected = [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528],\"\"crs\"\":{\"\"type\"\":\"\"name\"\",\"\"properties\"\":{\"\"name\"\":\"\"EPSG:4326\"\"}}}\"", + "node_a", + "\"2020-03-25T15:11:31Z[UTC] \n 2020-03-24T15:11:31Z[UTC]\"", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "\"1,0\"", + "1.0", + "Höchstspannung", + "380.0", + "\"olm:{(0.00,1.00)}\"", + "\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\""] as Set + + when: + def actual = input.stream().map({ inputElement -> CsvFileSink.csvString(inputElement, ",") }).collect(Collectors.toSet()) as Set + + then: + actual == expected + } + + /*Will move to PowerSystemDataUtils*/ + + def "The StringUtils converts a given LinkedHashMap of csv data to match the csv 
specification RFC 4180 "() { + given: + def input = [ + "activePowerGradient": "25.0", + "capex" : "100,0", + "cosphiRated" : "0.95", + "etaConv" : "98.0", + "id" : "test \n bmTypeInput", + "opex" : "50.0", + "sRated" : "25.0", + "uu,id" : "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8", + "geoPosition" : "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "olm\"characteristic": "olm:{(0.0,1.0)}", + "cosPhiFixed" : "cosPhiFixed:{(0.0,1.0)}" + ] as LinkedHashMap + + def expected = [ + "activePowerGradient" : "25.0", + "capex" : "\"100,0\"", + "cosphiRated" : "0.95", + "etaConv" : "98.0", + "id" : "\"test \n bmTypeInput\"", + "opex" : "50.0", + "sRated" : "25.0", + "\"uu,id\"" : "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8", + "geoPosition" : "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528],\"\"crs\"\":{\"\"type\"\":\"\"name\"\",\"\"properties\"\":{\"\"name\"\":\"\"EPSG:4326\"\"}}}\"", + "\"olm\"\"characteristic\"": "\"olm:{(0.0,1.0)}\"", + "cosPhiFixed" : "\"cosPhiFixed:{(0.0,1.0)}\"" + ] as LinkedHashMap + + when: + def actualList = input.entrySet().stream().map({ mapEntry -> + return new AbstractMap.SimpleEntry(CsvFileSink.csvString(mapEntry.key, ","), CsvFileSink.csvString(mapEntry.value, ",")) + }) as Set + + def actual = actualList.collectEntries { + [it.key, it.value] + } + + then: + actual == expected + } + + def "The StringUtils converts a given Array of csv header elements to match the csv specification RFC "() { + expect: + CsvFileSink.csvString(inputString, csvSep) == expect + + where: + inputString | csvSep || expect + "activePowerGradient" | "," || "activePowerGradient" + "\"100,0\"" | "," || "\"100,0\"" + "100,0" | "," || "\"100,0\"" + "100,0" | ";" || "\"100,0\"" + "100;0" | ";" || "\"100;0\"" + "\"100;0\"" | ";" || "\"100;0\"" + "100;0" | "," || "100;0" + "olm:{(0.00,1.00)}" | "," || "\"olm:{(0.00,1.00)}\"" + "olm:{(0.00,1.00)}" | ";" || "\"olm:{(0.00,1.00)}\"" + 
"{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528]}" | "," || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528]}" | ";" || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" + "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" | "," || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" + "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" | ";" || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" + "\"{\"\"type\"\"\":\"\"Point\"\"\"\",\"\"coordinates\"\"\":[7.411111,51.492528]}\"" | "," || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" + "\"{\"\"type\"\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" | ";" || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" + "uu,id" | "," || "\"uu,id\"" + "uu,id" | ";" || "\"uu,id\"" + } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy index 7878b158e..ef6aaeaec 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy @@ -174,19 +174,19 @@ class CsvDataSourceTest extends Specification { "{\"type\":\"Point\",\"coordinates\":[0.25423729,0.75409836],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:0\"}}}" ] "," | "4ca90220-74c2-4369-9afa-a18bf068840d,{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528],\"\"crs\"\":{\"\"type\"\":\"\"name\"\",\"\"properties\"\":{\"\"name\"\":\"\"EPSG:4326\"\"}}},node_a,2020-03-25T15:11:31Z[UTC],2020-03-24T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,true,1,1.0,Höchstspannung,380.0,\"olm:{(0.00,1.00)}\",\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\"" || [ - 
"4ca90220-74c2-4369-9afa-a18bf068840d", - "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", - "node_a", - "2020-03-25T15:11:31Z[UTC]", - "2020-03-24T15:11:31Z[UTC]", - "8f9682df-0744-4b58-a122-f0dc730f6510", - "true", - "1", - "1.0", - "Höchstspannung", - "380.0", - "olm:{(0.00,1.00)}", - "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + "4ca90220-74c2-4369-9afa-a18bf068840d", + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC]", + "2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" ] From 049af4119ddf889f397dc59276390f69700b3d2e Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Wed, 17 Jun 2020 11:33:39 +0200 Subject: [PATCH 15/30] adapted CsvFileSink for valid quoting --- .../ie3/datamodel/io/sink/CsvFileSink.java | 74 +++++++++++++------ 1 file changed, 50 insertions(+), 24 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index e0568a310..95a1da505 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -145,6 +145,46 @@ public void persist(T entity) { } } + /** + * TODO JH + * + * @param headerElements + * @return + */ + private String[] csvHeaderElements(String[] headerElements) { + return Arrays.stream(headerElements) + .map(inputElement -> csvString(inputElement, csvSep)) + .toArray(String[]::new); + } + + /** + * // todo JH + * + * @param entityFieldData + * @return + */ + private LinkedHashMap csvEntityFieldData( + LinkedHashMap entityFieldData) { + + return entityFieldData.entrySet().stream() + .map( + mapEntry -> + new AbstractMap.SimpleEntry<>( + 
csvString(mapEntry.getKey(), ","), csvString(mapEntry.getValue(), ","))) + .collect( + Collectors.toMap( + AbstractMap.SimpleEntry::getKey, + AbstractMap.SimpleEntry::getValue, + (v1, v2) -> { + throw new IllegalStateException( + "Duplicate keys in entityFieldData are not allowed!" + + entityFieldData.entrySet().stream() + .map(entry -> entry.getKey() + " = " + entry.getValue()) + .collect(Collectors.joining(",\n"))); + }, + LinkedHashMap::new)); + } + @Override public void persistIgnoreNested(C entity) { LinkedHashMap entityFieldData; @@ -163,29 +203,13 @@ public void persistIgnoreNested(C entity) { .collect(Collectors.joining(",")) + "]")); - String[] headerElements = processorProvider.getHeaderElements(entity.getClass()); - String[] headerElementsQuoted = - Arrays.stream(headerElements) - .map(inputElement -> csvString(inputElement, ",")) - .toArray(String[]::new); + String[] headerElements = + csvHeaderElements(processorProvider.getHeaderElements(entity.getClass())); BufferedCsvWriter writer = - connector.getOrInitWriter(entity.getClass(), headerElementsQuoted, csvSep); - LinkedHashMap quotedEntityFieldData = - entityFieldData.entrySet().stream() - .map( - mapEntry -> - new AbstractMap.SimpleEntry<>( - csvString(mapEntry.getKey(), ","), csvString(mapEntry.getValue(), ","))) - .collect( - Collectors.toMap( - AbstractMap.SimpleEntry::getKey, - AbstractMap.SimpleEntry::getValue, - (v1, v2) -> { - throw new IllegalStateException(); - }, - LinkedHashMap::new)); - writer.write(quotedEntityFieldData); + connector.getOrInitWriter(entity.getClass(), headerElements, csvSep); + + writer.write(csvEntityFieldData(entityFieldData)); } catch (ProcessorProviderException e) { log.error( "Exception occurred during receiving of header elements. 
Cannot write this element.", e); @@ -372,12 +396,12 @@ public , V extends Value> void persistTimeSeries( .collect(Collectors.joining(",")) + "]")); - String[] headerElements = processorProvider.getHeaderElements(key); + String[] headerElements = csvHeaderElements(processorProvider.getHeaderElements(key)); BufferedCsvWriter writer = connector.getOrInitWriter(timeSeries, headerElements, csvSep); entityFieldData.forEach( data -> { try { - writer.write(data); + writer.write(csvEntityFieldData(data)); } catch (IOException e) { log.error( "Cannot write the following entity data: '{}'. Exception: {}", @@ -413,7 +437,9 @@ private void initFiles( .forEach( clz -> { try { - String[] headerElements = processorProvider.getHeaderElements(clz); + String[] headerElements = + csvHeaderElements(processorProvider.getHeaderElements(clz)); + connector.getOrInitWriter(clz, headerElements, csvSep); } catch (ProcessorProviderException e) { log.error( From 4c2b8589a9089e4d35037050eb4ab77fc560ca28 Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Wed, 17 Jun 2020 12:32:58 +0200 Subject: [PATCH 16/30] - fix test - adapt CsvDataSource to prevent confusing double quote replacements --- .../io/source/csv/CsvDataSource.java | 4 +- .../datamodel/io/sink/CsvFileSinkTest.groovy | 107 ------------------ .../io/source/csv/CsvDataSourceTest.groovy | 2 - 3 files changed, 1 insertion(+), 112 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java index c4c6ea65a..aaabb7ddf 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java @@ -140,11 +140,9 @@ private String[] fieldVals(String csvSep, String csvRow) { return Arrays.stream( csvRow - /*removes double double quotes not covered by json or geo strings*/ - .replaceAll("\"\"", "\"") .replaceAll(charInputRegex, charReplacement) .replaceAll(geoJsonRegex, 
geoReplacement) - .replaceAll("\"", "") + .replaceAll("\"*", "") // remove all quotes from .split(csvSep, -1)) .map( fieldVal -> { diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index 546e41ff6..fdbca3f26 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -188,111 +188,4 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { new File(testBaseFolderPath + File.separator + "individual_time_series_a4bbcb77-b9d0-4b88-92be-b9a14a3e332b.csv").exists() new File(testBaseFolderPath + File.separator + "load_profile_time_series_g2_b56853fe-b800-4c18-b324-db1878b22a28.csv").exists() } - - def "The StringUtils converts a given Array of csv header elements to match the csv specification RFC 4180 "() { - given: - def input = [ - "4ca90220-74c2-4369-9afa-a18bf068840d", - "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", - "node_a", - "2020-03-25T15:11:31Z[UTC] \n 2020-03-24T15:11:31Z[UTC]", - "8f9682df-0744-4b58-a122-f0dc730f6510", - "true", - "1,0", - "1.0", - "Höchstspannung", - "380.0", - "olm:{(0.00,1.00)}", - "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" - ] - def expected = [ - "4ca90220-74c2-4369-9afa-a18bf068840d", - "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528],\"\"crs\"\":{\"\"type\"\":\"\"name\"\",\"\"properties\"\":{\"\"name\"\":\"\"EPSG:4326\"\"}}}\"", - "node_a", - "\"2020-03-25T15:11:31Z[UTC] \n 2020-03-24T15:11:31Z[UTC]\"", - "8f9682df-0744-4b58-a122-f0dc730f6510", - "true", - "\"1,0\"", - "1.0", - "Höchstspannung", - "380.0", - "\"olm:{(0.00,1.00)}\"", - "\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\""] as Set - - when: - def actual = input.stream().map({ inputElement -> CsvFileSink.csvString(inputElement, ",") }).collect(Collectors.toSet()) as 
Set - - then: - actual == expected - } - - /*Will move to PowerSystemDataUtils*/ - - def "The StringUtils converts a given LinkedHashMap of csv data to match the csv specification RFC 4180 "() { - given: - def input = [ - "activePowerGradient": "25.0", - "capex" : "100,0", - "cosphiRated" : "0.95", - "etaConv" : "98.0", - "id" : "test \n bmTypeInput", - "opex" : "50.0", - "sRated" : "25.0", - "uu,id" : "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8", - "geoPosition" : "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", - "olm\"characteristic": "olm:{(0.0,1.0)}", - "cosPhiFixed" : "cosPhiFixed:{(0.0,1.0)}" - ] as LinkedHashMap - - def expected = [ - "activePowerGradient" : "25.0", - "capex" : "\"100,0\"", - "cosphiRated" : "0.95", - "etaConv" : "98.0", - "id" : "\"test \n bmTypeInput\"", - "opex" : "50.0", - "sRated" : "25.0", - "\"uu,id\"" : "5ebd8f7e-dedb-4017-bb86-6373c4b68eb8", - "geoPosition" : "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528],\"\"crs\"\":{\"\"type\"\":\"\"name\"\",\"\"properties\"\":{\"\"name\"\":\"\"EPSG:4326\"\"}}}\"", - "\"olm\"\"characteristic\"": "\"olm:{(0.0,1.0)}\"", - "cosPhiFixed" : "\"cosPhiFixed:{(0.0,1.0)}\"" - ] as LinkedHashMap - - when: - def actualList = input.entrySet().stream().map({ mapEntry -> - return new AbstractMap.SimpleEntry(CsvFileSink.csvString(mapEntry.key, ","), CsvFileSink.csvString(mapEntry.value, ",")) - }) as Set - - def actual = actualList.collectEntries { - [it.key, it.value] - } - - then: - actual == expected - } - - def "The StringUtils converts a given Array of csv header elements to match the csv specification RFC "() { - expect: - CsvFileSink.csvString(inputString, csvSep) == expect - - where: - inputString | csvSep || expect - "activePowerGradient" | "," || "activePowerGradient" - "\"100,0\"" | "," || "\"100,0\"" - "100,0" | "," || "\"100,0\"" - "100,0" | ";" || "\"100,0\"" - "100;0" | ";" || "\"100;0\"" - 
"\"100;0\"" | ";" || "\"100;0\"" - "100;0" | "," || "100;0" - "olm:{(0.00,1.00)}" | "," || "\"olm:{(0.00,1.00)}\"" - "olm:{(0.00,1.00)}" | ";" || "\"olm:{(0.00,1.00)}\"" - "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528]}" | "," || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" - "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528]}" | ";" || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" - "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" | "," || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" - "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" | ";" || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" - "\"{\"\"type\"\"\":\"\"Point\"\"\"\",\"\"coordinates\"\"\":[7.411111,51.492528]}\"" | "," || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" - "\"{\"\"type\"\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" | ";" || "\"{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528]}\"" - "uu,id" | "," || "\"uu,id\"" - "uu,id" | ";" || "\"uu,id\"" - } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy index ef6aaeaec..c419f3643 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy @@ -188,8 +188,6 @@ class CsvDataSourceTest extends Specification { "olm:{(0.00,1.00)}", "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" ] - - } From e0134db5c2741fd05b7b7367e420ebd11def4c95 Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Wed, 17 Jun 2020 12:41:46 +0200 Subject: [PATCH 17/30] replaced quoting in CsvFileSink with StringUtils --- .../ie3/datamodel/io/sink/CsvFileSink.java | 80 ++++--------------- 1 file changed, 14 insertions(+), 66 
deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 95a1da505..114690d19 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -28,6 +28,7 @@ import edu.ie3.datamodel.models.timeseries.TimeSeries; import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; import edu.ie3.datamodel.models.value.Value; +import edu.ie3.util.StringUtils; import java.io.IOException; import java.util.*; import java.util.stream.Collectors; @@ -146,22 +147,24 @@ public void persist(T entity) { } /** - * TODO JH + * Transforms a provided array of strings to valid csv formatted strings (according to csv + * specification RFC 4180) * - * @param headerElements - * @return + * @param strings array of strings that should be processed + * @return a new array with valid csv formatted strings */ - private String[] csvHeaderElements(String[] headerElements) { - return Arrays.stream(headerElements) - .map(inputElement -> csvString(inputElement, csvSep)) + private String[] csvHeaderElements(String[] strings) { + return Arrays.stream(strings) + .map(inputElement -> StringUtils.csvString(inputElement, csvSep)) .toArray(String[]::new); } /** - * // todo JH + * Transforms a provided map of string to string to valid csv formatted strings (according to csv + * specification RFC 4180) * - * @param entityFieldData - * @return + * @param entityFieldData a string to string map that should be processed + * @return a new map with valid csv formatted keys and values strings */ private LinkedHashMap csvEntityFieldData( LinkedHashMap entityFieldData) { @@ -170,7 +173,8 @@ private LinkedHashMap csvEntityFieldData( .map( mapEntry -> new AbstractMap.SimpleEntry<>( - csvString(mapEntry.getKey(), ","), csvString(mapEntry.getValue(), ","))) + StringUtils.csvString(mapEntry.getKey(), ","), + StringUtils.csvString(mapEntry.getValue(), ","))) 
.collect( Collectors.toMap( AbstractMap.SimpleEntry::getKey, @@ -225,62 +229,6 @@ public void persistIgnoreNested(C entity) { } } - /** - * Adds quotation marks at the beginning and end of the input, if they are not apparent, yet. - * - * @param input String to quote - * @return Quoted String - */ - public static String quote(String input) { - return quoteEnd(quoteStart(input)); - } - - private static String quoteStart(String input) { - return input.replaceAll("^([^\"])", "\"$1"); - } - - private static String quoteEnd(String input) { - return input.replaceAll("([^\"])$", "$1\""); - } - - /** - * Quotes a given string that contains special characters to comply with the csv specification RFC - * 4180 (https://tools.ietf.org/html/rfc4180). Double quotes in JSON strings are escaped with the - * same character to make the csv data readable later. - * - * @param inputString string that should be converted to a valid rfc 4180 string - * @param csvSep separator of the csv file - * @return a csv string that is valid according to rfc 4180 - */ - public static String csvString(String inputString, String csvSep) { - if (needsCsvRFC4180Quote(inputString, csvSep)) { - /* clean the string by first quoting start and end of the string and then replace all double quotes - * that are followed by one or more double quotes with single double quotes */ - String quotedStartEndString = quote(inputString).replaceAll("\"\"*", "\""); - /* get everything in between the start and end quotes and replace single quotes with double quotes */ - String stringWOStartEndQuotes = - quotedStartEndString - .substring(1, quotedStartEndString.length() - 1) - .replaceAll("\"", "\"\""); - /* finally add quotes to the strings start and end again */ - return quote(stringWOStartEndQuotes); - } else return inputString; - } - - /** - * Check if the provided string needs to be quoted according to the csv specification RFC 4180 - * - * @param inputString the string that should be checked - * @param csvSep 
separator of the csv file - * @return true of the string needs to be quoted, false otherwise - */ - private static boolean needsCsvRFC4180Quote(String inputString, String csvSep) { - return inputString.contains(csvSep) - || inputString.contains(",") - || inputString.contains("\"") - || inputString.contains("\n"); - } - @Override public void persistAllIgnoreNested(Collection entities) { entities.parallelStream().forEach(this::persistIgnoreNested); From d381577fa3131dc882128f774193f27e7a204c89 Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Wed, 17 Jun 2020 12:42:21 +0200 Subject: [PATCH 18/30] updated PowerSystemUtils dependency to current SNAPSHOT version --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 349eeb5fe..85f317f05 100644 --- a/build.gradle +++ b/build.gradle @@ -49,7 +49,7 @@ repositories { dependencies { // ie³ power system utils - compile 'com.github.ie3-institute:PowerSystemUtils:1.3.1' + compile 'com.github.ie3-institute:PowerSystemUtils:1.3.2-SNAPSHOT' compile "tec.uom:uom-se:$unitsOfMeasurementVersion" From d6d92894d5fa6882bde13438185e4a16b19baf2a Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Wed, 17 Jun 2020 12:43:31 +0200 Subject: [PATCH 19/30] changed method order in CsvFileSink --- .../ie3/datamodel/io/sink/CsvFileSink.java | 86 +++++++++---------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 114690d19..4a77d9ba5 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -146,49 +146,6 @@ public void persist(T entity) { } } - /** - * Transforms a provided array of strings to valid csv formatted strings (according to csv - * specification RFC 4180) - * - * @param strings array of strings that should be processed - * @return a new array with valid csv 
formatted strings - */ - private String[] csvHeaderElements(String[] strings) { - return Arrays.stream(strings) - .map(inputElement -> StringUtils.csvString(inputElement, csvSep)) - .toArray(String[]::new); - } - - /** - * Transforms a provided map of string to string to valid csv formatted strings (according to csv - * specification RFC 4180) - * - * @param entityFieldData a string to string map that should be processed - * @return a new map with valid csv formatted keys and values strings - */ - private LinkedHashMap csvEntityFieldData( - LinkedHashMap entityFieldData) { - - return entityFieldData.entrySet().stream() - .map( - mapEntry -> - new AbstractMap.SimpleEntry<>( - StringUtils.csvString(mapEntry.getKey(), ","), - StringUtils.csvString(mapEntry.getValue(), ","))) - .collect( - Collectors.toMap( - AbstractMap.SimpleEntry::getKey, - AbstractMap.SimpleEntry::getValue, - (v1, v2) -> { - throw new IllegalStateException( - "Duplicate keys in entityFieldData are not allowed!" - + entityFieldData.entrySet().stream() - .map(entry -> entry.getKey() + " = " + entry.getValue()) - .collect(Collectors.joining(",\n"))); - }, - LinkedHashMap::new)); - } - @Override public void persistIgnoreNested(C entity) { LinkedHashMap entityFieldData; @@ -402,4 +359,47 @@ private void initFiles( } }); } + + /** + * Transforms a provided array of strings to valid csv formatted strings (according to csv + * specification RFC 4180) + * + * @param strings array of strings that should be processed + * @return a new array with valid csv formatted strings + */ + private String[] csvHeaderElements(String[] strings) { + return Arrays.stream(strings) + .map(inputElement -> StringUtils.csvString(inputElement, csvSep)) + .toArray(String[]::new); + } + + /** + * Transforms a provided map of string to string to valid csv formatted strings (according to csv + * specification RFC 4180) + * + * @param entityFieldData a string to string map that should be processed + * @return a new map with valid csv 
formatted keys and values strings + */ + private LinkedHashMap csvEntityFieldData( + LinkedHashMap entityFieldData) { + + return entityFieldData.entrySet().stream() + .map( + mapEntry -> + new AbstractMap.SimpleEntry<>( + StringUtils.csvString(mapEntry.getKey(), ","), + StringUtils.csvString(mapEntry.getValue(), ","))) + .collect( + Collectors.toMap( + AbstractMap.SimpleEntry::getKey, + AbstractMap.SimpleEntry::getValue, + (v1, v2) -> { + throw new IllegalStateException( + "Duplicate keys in entityFieldData are not allowed!" + + entityFieldData.entrySet().stream() + .map(entry -> entry.getKey() + " = " + entry.getValue()) + .collect(Collectors.joining(",\n"))); + }, + LinkedHashMap::new)); + } } From 5f80e75ab31485567fa859104da1e71f5106ea7f Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Wed, 17 Jun 2020 12:58:18 +0200 Subject: [PATCH 20/30] minor change in CsvFileSink --- src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index 4a77d9ba5..db8741556 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -148,9 +148,8 @@ public void persist(T entity) { @Override public void persistIgnoreNested(C entity) { - LinkedHashMap entityFieldData; try { - entityFieldData = + LinkedHashMap entityFieldData = processorProvider .handleEntity(entity) .orElseThrow( From d7ea57d474defe10861984bb8283b785b67c97c9 Mon Sep 17 00:00:00 2001 From: johanneshiry Date: Thu, 18 Jun 2020 13:02:22 +0200 Subject: [PATCH 21/30] Update src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java Co-authored-by: Chris Kittl <44838605+ckittl@users.noreply.github.com> --- src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java index db8741556..680e85109 100644 --- a/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java +++ b/src/main/java/edu/ie3/datamodel/io/sink/CsvFileSink.java @@ -386,8 +386,8 @@ private LinkedHashMap csvEntityFieldData( .map( mapEntry -> new AbstractMap.SimpleEntry<>( - StringUtils.csvString(mapEntry.getKey(), ","), - StringUtils.csvString(mapEntry.getValue(), ","))) + StringUtils.csvString(mapEntry.getKey(), csvSep), + StringUtils.csvString(mapEntry.getValue(), csvSep))) .collect( Collectors.toMap( AbstractMap.SimpleEntry::getKey, From 3831a3caccdd6ec38fbb0474d338b887e920f712 Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 13:07:24 +0200 Subject: [PATCH 22/30] added sonatype snapshot repo --- build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/build.gradle b/build.gradle index 85f317f05..1f43169fb 100644 --- a/build.gradle +++ b/build.gradle @@ -45,6 +45,9 @@ repositories { jcenter() //searches in bintray's repository 'jCenter', which contains Maven Central maven { url 'https://www.jitpack.io' } // allows github repos as dependencies + // sonatype snapshot repo + maven { url 'http://oss.sonatype.org/content/repositories/snapshots' } + } dependencies { From 2d10c3df52c19318646d1dd33b4b7916116350de Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 15:37:35 +0200 Subject: [PATCH 23/30] - set CsvDataSource to RFC 4180 valid parsing - extensive tests for new parsing --- CHANGELOG.md | 1 + .../io/source/csv/CsvDataSource.java | 49 +++++-- .../io/source/csv/CsvIdCoordinateSource.java | 2 +- .../io/source/csv/CsvDataSourceTest.groovy | 126 +++++++++++++++++- 4 files changed, 162 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 26ff1d0c0..e97c990e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ coordinates or multiple exactly equal coordinates possible - 
Extended functionality of `GridAndGeoUtils` - `CsvFileConnector` is now set up to process either UniqueEntities or only by file name - `SwitchResult` superclass changed from `ConnectorResult` to `ResultEntity` +- ``CsvDataSource`` now parses valid RFC 4180 rows correctly (invalid, old syntax is still supported but deprecated!) ### Fixed - CsvDataSource now stops trying to get an operator for empty operator uuid field in entities diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java index aaabb7ddf..300d16331 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java @@ -78,15 +78,33 @@ private Map buildFieldsToAttributes( TreeMap insensitiveFieldsToAttributes = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); - final String[] fieldVals = fieldVals(csvSep, csvRow); + // when replacing deprecated workaround add final modifier before parseCsvRow as well as remove + // 'finalFieldVals' below! + String[] fieldVals = parseCsvRow(csvSep, csvRow); + + // start workaround for deprecated data model processing + if (fieldVals.length != headline.length) { + // try to parse old structure + fieldVals = oldFieldVals(csvSep, csvRow); + // if this works log a warning to inform the user that this will not work much longer, + // otherwise + // parsing will fail regularly as expected below + if (fieldVals.length == headline.length) + log.warn( + "You are using an outdated version of the data " + + "model with invalid formatted csv rows. 
This is okay for now, but please updated your files, as the " + + "support for the old model will be removed soon."); + } + // end workaround for deprecated data model processing try { + String[] finalFieldVals = fieldVals; insensitiveFieldsToAttributes.putAll( IntStream.range(0, fieldVals.length) .boxed() .collect( Collectors.toMap( - k -> StringUtils.snakeCaseToCamelCase(headline[k]), v -> fieldVals[v]))); + k -> StringUtils.snakeCaseToCamelCase(headline[k]), v -> finalFieldVals[v]))); if (insensitiveFieldsToAttributes.size() != headline.length) { Set fieldsToAttributesKeySet = insensitiveFieldsToAttributes.keySet(); @@ -112,6 +130,23 @@ private Map buildFieldsToAttributes( return insensitiveFieldsToAttributes; } + /** + * Parse a given row of a valid RFC 4180 formatted csv row + * + * @param csvSep separator of the csv file + * @param csvRow the valid row + * @return an array with the csv field values as strings + */ + protected String[] parseCsvRow(String csvSep, String csvRow) { + return Arrays.stream(csvRow.split(csvSep + "(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1)) + .map( + maybeStartEndQuotedString -> + StringUtils.unquoteStartEnd(maybeStartEndQuotedString.trim()) + .replaceAll("\"{2}", "\"") + .trim()) + .toArray(String[]::new); + } + /** * Build an array of from the provided csv row string considering special cases where geoJson or * {@link edu.ie3.datamodel.models.input.system.characteristic.CharacteristicInput} are provided @@ -120,8 +155,10 @@ private Map buildFieldsToAttributes( * @param csvSep the column separator of the csv row string * @param csvRow the csv row string * @return an array with one entry per column of the provided csv row string + * @deprecated only left for downward compatibility. 
Will be removed in a major release */ - private String[] fieldVals(String csvSep, String csvRow) { + @Deprecated + private String[] oldFieldVals(String csvSep, String csvRow) { /*geo json support*/ final String geoJsonRegex = "[\\{].+?\\}\\}\\}"; @@ -304,7 +341,7 @@ protected Stream> buildStreamWithFieldsToAttributesMap( Class entityClass, CsvFileConnector connector) { try (BufferedReader reader = connector.initReader(entityClass)) { - final String[] headline = parseCsvHeadline(reader.readLine(), csvSep); + final String[] headline = parseCsvRow(reader.readLine(), csvSep); // sanity check for headline if (!Arrays.asList(headline).contains("uuid")) { @@ -342,10 +379,6 @@ protected List> csvRowFieldValueMapping( .collect(Collectors.toList()); } - protected String[] parseCsvHeadline(String csvHeadline, String csvSep) { - return csvHeadline.replaceAll("\"", "").toLowerCase().split(csvSep); - } - /** * Returns a collection of maps each representing a row in csv file that can be used to built an * instance of a {@link UniqueEntity}. 
The uniqueness of each row is doubled checked by a) that no diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSource.java index 701c1773a..d37da2cdc 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvIdCoordinateSource.java @@ -116,7 +116,7 @@ private Stream> buildStreamWithFieldsToAttributesMap( // As we still want to consume the data at other places, we start a new stream instead of // returning the original one try (BufferedReader reader = connector.initReader(filename)) { - final String[] headline = parseCsvHeadline(reader.readLine(), csvSep); + final String[] headline = parseCsvRow(reader.readLine(), csvSep); if (!Arrays.asList(headline).containsAll(Arrays.asList("id", "lat", "lon"))) { throw new SourceException( diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy index c419f3643..8f3181816 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy @@ -46,9 +46,14 @@ class CsvDataSourceTest extends Specification { return super.distinctRowsWithLog(entityClass, allRows) } - String[] fieldVals( + String[] parseCsvRow( String csvSep, String csvRow) { - return super.fieldVals(csvSep, csvRow) + return super.parseCsvRow(csvSep, csvRow) + } + + String[] oldFieldVals( + String csvSep, String csvRow) { + return super.oldFieldVals(csvSep, csvRow) } } @@ -99,9 +104,9 @@ class CsvDataSourceTest extends Specification { } - def "A CsvDataSource should be able to handle a variety of different csvRows correctly"() { + def "A CsvDataSource should be able to handle deprecated invalid csvRows correctly"() { expect: - dummyCsvSource.fieldVals(csvSep, csvRow) as List == resultingArray 
+ dummyCsvSource.oldFieldVals(csvSep, csvRow) as List == resultingArray where: csvSep | csvRow || resultingArray @@ -173,7 +178,30 @@ class CsvDataSourceTest extends Specification { "{\"type\":\"LineString\",\"coordinates\":[[7.4116482,51.4843281],[7.4116482,51.4843281]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "{\"type\":\"Point\",\"coordinates\":[0.25423729,0.75409836],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:0\"}}}" ] - "," | "4ca90220-74c2-4369-9afa-a18bf068840d,{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528],\"\"crs\"\":{\"\"type\"\":\"\"name\"\",\"\"properties\"\":{\"\"name\"\":\"\"EPSG:4326\"\"}}},node_a,2020-03-25T15:11:31Z[UTC],2020-03-24T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,true,1,1.0,Höchstspannung,380.0,\"olm:{(0.00,1.00)}\",\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\"" || [ + "," | "4ca90220-74c2-4369-9afa-a18bf068840d,{\"\"type\"\":\"\"Point\"\",\"\"coordinates\"\":[7.411111,51.492528],\"\"crs\"\":{\"\"type\"\":\"\"name\"\",\"\"properties\"\":{\"\"name\"\":\"\"EPSG:4326\"\"}}},node_a,2020-03-25T15:11:31Z[UTC],2020-03-24T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,true,1,1.0,Höchstspannung,380.0,\"olm:{(0.00,1.00)}\",\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\"" || [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC]", + "2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + ] + } + + def "A CsvDataSource should be able to handle a variety of different csvRows correctly"() { + expect: + dummyCsvSource.parseCsvRow(csvSep, csvRow) as List == resultingArray + + where: + csvSep | csvRow || resultingArray + "," | 
"\"4ca90220-74c2-4369-9afa-a18bf068840d\",\"{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}\",\"node_a\",\"2020-03-25T15:11:31Z[UTC]\",\"2020-03-24T15:11:31Z[UTC]\",\"8f9682df-0744-4b58-a122-f0dc730f6510\",\"true\",\"1\",\"1.0\",\"Höchstspannung\",\"380.0\",\"olm:{(0.00,1.00)}\",\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\"" || [ "4ca90220-74c2-4369-9afa-a18bf068840d", "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "node_a", @@ -188,6 +216,90 @@ class CsvDataSourceTest extends Specification { "olm:{(0.00,1.00)}", "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" ] + ";" | "\"4ca90220-74c2-4369-9afa-a18bf068840d\";\"{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}\";\"node_a\";\"2020-03-25T15:11:31Z[UTC]\";\"2020-03-24T15:11:31Z[UTC]\";\"8f9682df-0744-4b58-a122-f0dc730f6510\";\"true\";\"1\";\"1.0\";\"Höchstspannung\";\"380.0\";\"olm:{(0.00,1.00)}\";\"cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}\"" || [ + "4ca90220-74c2-4369-9afa-a18bf068840d", + "{\"type\":\"Point\",\"coordinates\":[7.411111,51.492528],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", + "node_a", + "2020-03-25T15:11:31Z[UTC]", + "2020-03-24T15:11:31Z[UTC]", + "8f9682df-0744-4b58-a122-f0dc730f6510", + "true", + "1", + "1.0", + "Höchstspannung", + "380.0", + "olm:{(0.00,1.00)}", + "cosPhiP:{(0.0,1.0),(0.9,1.0),(1.2,-0.3)}" + ] + "," | "1,abc,def,\"He said \"\"run, run\"\"\", 6.0, \"thats \"\"good\"\"\"" || [ + "1", + "abc", + "def", + "He said \"run, run\"", + "6.0", + "thats \"good\"" + ] + ";" | "1;abc;def;\"He said \"\"run, run\"\"\"; 6.0; \"thats \"\"good\"\"\"" || [ + "1", + "abc", + "def", + "He said \"run, run\"", + "6.0", + "thats \"good\"" + ] + ";" | "1;abc;def;\"He said \"\"run; run\"\"\"; 6.0; \"thats \"\"good\"\"\"" || [ + "1", + "abc", + "def", + 
"He said \"run; run\"", + "6.0", + "thats \"good\"" + ] + "," | "1,abc,def,\"He said \"\"test, test\"\" and was happy\", 5.0" || [ + "1", + "abc", + "def", + "He said \"test, test\" and was happy", + "5.0" + ] + "," | "1,abc,def,\"He said \"\"test, test\"\" and was happy\",\"obviously, yet.\", 5.0" || [ + "1", + "abc", + "def", + "He said \"test, test\" and was happy", + "obviously, yet.", + "5.0" + ] + "," | "1,abc,def,\"He said \"\"test, test\"\"\", 5.0" || [ + "1", + "abc", + "def", + "He said \"test, test\"", + "5.0" + ] + "," | "1,abc,def,\"He said \"\"test, test\"\"\"" || [ + "1", + "abc", + "def", + "He said \"test, test\"" + ] + "," | "1,abc,def,\"He said \"\"test, test\"\" and was happy\", 5.0, \"... and felt like a \"\"genius\"\" with this.\"" || [ + "1", + "abc", + "def", + "He said \"test, test\" and was happy", + "5.0", + "... and felt like a \"genius\" with this." + ] + "," | "1,abc,def,\"He said \"\"test, test\"\" and was happy\", 5.0, \"... and felt like a \"\"genius\"\" with this.\"," || [ + "1", + "abc", + "def", + "He said \"test, test\" and was happy", + "5.0", + "... 
and felt like a \"genius\" with this.", + "" + ] } @@ -251,10 +363,10 @@ class CsvDataSourceTest extends Specification { dummyCsvSource.getFirstOrDefaultOperator(operators, operatorUuid, entityClassName, requestEntityUuid) == expectedOperator where: - operatorUuid | operators | entityClassName | requestEntityUuid || expectedOperator + operatorUuid | operators | entityClassName | requestEntityUuid || expectedOperator "8f9682df-0744-4b58-a122-f0dc730f6510" | [sptd.hpInput.operator]| "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || sptd.hpInput.operator "8f9682df-0744-4b58-a122-f0dc730f6520" | [sptd.hpInput.operator]| "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || OperatorInput.NO_OPERATOR_ASSIGNED - "8f9682df-0744-4b58-a122-f0dc730f6510" | []| "TestEntityClass"|"8f9682df-0744-4b58-a122-f0dc730f6511" || OperatorInput.NO_OPERATOR_ASSIGNED + "8f9682df-0744-4b58-a122-f0dc730f6510" | []| "TestEntityClass" | "8f9682df-0744-4b58-a122-f0dc730f6511" || OperatorInput.NO_OPERATOR_ASSIGNED } From 93d618d09fb8665aaea6c5b6a9ff6243178dedbc Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 20:47:50 +0200 Subject: [PATCH 24/30] fixed invalid method parameter order in CsvDataSource#parseCsvRow --- .../java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java | 6 +++--- .../ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java index 300d16331..8ce532ed1 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java @@ -80,7 +80,7 @@ private Map buildFieldsToAttributes( // when replacing deprecated workaround add final modifier before parseCsvRow as well as remove // 'finalFieldVals' below! 
- String[] fieldVals = parseCsvRow(csvSep, csvRow); + String[] fieldVals = parseCsvRow(csvRow, csvSep); // start workaround for deprecated data model processing if (fieldVals.length != headline.length) { @@ -133,11 +133,11 @@ private Map buildFieldsToAttributes( /** * Parse a given row of a valid RFC 4180 formatted csv row * - * @param csvSep separator of the csv file * @param csvRow the valid row + * @param csvSep separator of the csv file * @return an array with the csv field values as strings */ - protected String[] parseCsvRow(String csvSep, String csvRow) { + protected String[] parseCsvRow(String csvRow, String csvSep) { return Arrays.stream(csvRow.split(csvSep + "(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1)) .map( maybeStartEndQuotedString -> diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy index 8f3181816..7605d9c26 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy @@ -47,8 +47,8 @@ class CsvDataSourceTest extends Specification { } String[] parseCsvRow( - String csvSep, String csvRow) { - return super.parseCsvRow(csvSep, csvRow) + String csvRow,String csvSep) { + return super.parseCsvRow(csvRow, csvSep) } String[] oldFieldVals( @@ -197,7 +197,7 @@ class CsvDataSourceTest extends Specification { def "A CsvDataSource should be able to handle a variety of different csvRows correctly"() { expect: - dummyCsvSource.parseCsvRow(csvSep, csvRow) as List == resultingArray + dummyCsvSource.parseCsvRow(csvRow, csvSep) as List == resultingArray where: csvSep | csvRow || resultingArray From d6b14ee12a85ba2e262715fb132c4c46de34b1cb Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 21:01:21 +0200 Subject: [PATCH 25/30] updated spock testing framework version --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/build.gradle b/build.gradle index 1f43169fb..ebd57aa25 100644 --- a/build.gradle +++ b/build.gradle @@ -65,7 +65,7 @@ dependencies { // testing testCompile 'org.junit.jupiter:junit-jupiter:5.5.2' - testCompile 'org.spockframework:spock-core:2.0-M1-groovy-2.5' + testCompile 'org.spockframework:spock-core:2.0-M3-groovy-3.0' testCompile 'org.objenesis:objenesis:3.1' // Mock creation with constructor parameters // Testcontainers (Docker Framework for testing) From ffa0a13f68e9179be6728ffc8ce89b72d683f917 Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 21:10:33 +0200 Subject: [PATCH 26/30] fix broken tests + addressing reviewers comments --- .../io/source/csv/CsvDataSource.java | 14 ++++++---- .../io/source/csv/CsvGraphicSourceTest.groovy | 26 +++++++++++-------- 2 files changed, 24 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java index 8ce532ed1..354d7570b 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java @@ -56,6 +56,8 @@ public abstract class CsvDataSource { protected static final String TYPE = "type"; protected static final String FIELDS_TO_VALUES_MAP = "fieldsToValuesMap"; + @Deprecated private boolean notYetLoggedWarning = true; + public CsvDataSource(String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { this.csvSep = csvSep; this.connector = new CsvFileConnector(folderPath, fileNamingStrategy); @@ -78,8 +80,9 @@ private Map buildFieldsToAttributes( TreeMap insensitiveFieldsToAttributes = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); - // when replacing deprecated workaround add final modifier before parseCsvRow as well as remove - // 'finalFieldVals' below! 
+ // todo when replacing deprecated workaround code below add final modifier before parseCsvRow as + // well as remove + // 'finalFieldVals' and notYetLoggedWarning below! String[] fieldVals = parseCsvRow(csvRow, csvSep); // start workaround for deprecated data model processing @@ -87,13 +90,14 @@ private Map buildFieldsToAttributes( // try to parse old structure fieldVals = oldFieldVals(csvSep, csvRow); // if this works log a warning to inform the user that this will not work much longer, - // otherwise - // parsing will fail regularly as expected below - if (fieldVals.length == headline.length) + // otherwise parsing will fail regularly as expected below + if (fieldVals.length == headline.length && notYetLoggedWarning) { + notYetLoggedWarning = false; log.warn( "You are using an outdated version of the data " + "model with invalid formatted csv rows. This is okay for now, but please updated your files, as the " + "support for the old model will be removed soon."); + } } // end workaround for deprecated data model processing diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy index 73164c1a5..ed5fc2867 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvGraphicSourceTest.groovy @@ -9,12 +9,15 @@ import edu.ie3.datamodel.io.factory.input.graphics.LineGraphicInputEntityData import edu.ie3.datamodel.io.factory.input.graphics.NodeGraphicInputEntityData import edu.ie3.datamodel.io.source.RawGridSource import edu.ie3.datamodel.models.input.NodeInput +import edu.ie3.datamodel.models.input.OperatorInput import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput import edu.ie3.test.common.GridTestData as gtd import org.locationtech.jts.geom.LineString import org.locationtech.jts.geom.Point import spock.lang.Specification +import java.util.stream.Collectors + 
class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { @@ -39,17 +42,18 @@ class CsvGraphicSourceTest extends Specification implements CsvTestDataMeta { def "A CsvGraphicSource should process invalid input data as expected when requested to provide an instance of GraphicElements"() { given: def typeSource = new CsvTypeSource(csvSep, typeFolderPath, fileNamingStrategy) - def rawGridSource = Spy(CsvRawGridSource, constructorArgs: [ - csvSep, - gridFolderPath, - fileNamingStrategy, - typeSource - ]) { - // partly fake the return method of the csv raw grid source to always return empty node sets - // -> elements to build NodeGraphicInputs are missing - getNodes() >> new HashSet() - getNodes(_) >> new HashSet() - } as RawGridSource + def rawGridSource = + new CsvRawGridSource(csvSep, gridFolderPath, fileNamingStrategy, typeSource) { + @Override + Set getNodes() { + return Collections.emptySet() + } + + @Override + Set getNodes(Set operators) { + return Collections.emptySet() + } + } def csvGraphicSource = new CsvGraphicSource(csvSep, graphicsFolderPath, fileNamingStrategy, typeSource, rawGridSource) From 18f6b07547bbb2a5c80cd35fda63768450056085 Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 21:27:24 +0200 Subject: [PATCH 27/30] adapt testfiles to new format + added documentation in CsvFileSinkTest --- .../groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy | 1 + .../resources/testGridFiles/graphics/line_graphic_input.csv | 4 ++-- .../resources/testGridFiles/graphics/node_graphic_input.csv | 6 +++--- src/test/resources/testGridFiles/grid/line_input.csv | 6 +++--- .../resources/testGridFiles/grid/measurement_unit_input.csv | 2 +- src/test/resources/testGridFiles/grid/node_input.csv | 4 ++-- src/test/resources/testGridFiles/grid/switch_input.csv | 2 +- .../resources/testGridFiles/grid/transformer2w_input.csv | 2 +- .../resources/testGridFiles/grid/transformer3w_input.csv | 2 +- 
src/test/resources/testGridFiles/grid_empty/line_input.csv | 2 +- .../testGridFiles/grid_empty/measurement_unit_input.csv | 2 +- src/test/resources/testGridFiles/grid_empty/node_input.csv | 2 +- .../resources/testGridFiles/grid_empty/switch_input.csv | 2 +- .../testGridFiles/grid_empty/transformer2w_input.csv | 2 +- .../testGridFiles/grid_empty/transformer3w_input.csv | 2 +- .../resources/testGridFiles/grid_malformed/line_input.csv | 6 +++--- .../testGridFiles/grid_malformed/measurement_unit_input.csv | 2 +- .../resources/testGridFiles/grid_malformed/node_input.csv | 4 ++-- .../resources/testGridFiles/grid_malformed/switch_input.csv | 2 +- .../testGridFiles/grid_malformed/transformer2w_input.csv | 2 +- .../testGridFiles/grid_malformed/transformer3w_input.csv | 2 +- src/test/resources/testGridFiles/participants/bm_input.csv | 4 ++-- src/test/resources/testGridFiles/participants/chp_input.csv | 4 ++-- .../participants/cylindrical_storage_input.csv | 2 +- src/test/resources/testGridFiles/participants/ev_input.csv | 4 ++-- .../testGridFiles/participants/fixed_feed_in_input.csv | 4 ++-- src/test/resources/testGridFiles/participants/hp_input.csv | 4 ++-- .../resources/testGridFiles/participants/load_input.csv | 4 ++-- src/test/resources/testGridFiles/participants/pv_input.csv | 4 ++-- .../resources/testGridFiles/participants/storage_input.csv | 4 ++-- .../testGridFiles/participants/thermal_bus_input.csv | 2 +- src/test/resources/testGridFiles/participants/wec_input.csv | 4 ++-- .../testGridFiles/thermal/cylindrical_storage_input.csv | 2 +- .../resources/testGridFiles/thermal/thermal_bus_input.csv | 2 +- .../resources/testGridFiles/thermal/thermal_house_input.csv | 2 +- src/test/resources/testGridFiles/types/bm_type_input.csv | 2 +- src/test/resources/testGridFiles/types/chp_type_input.csv | 2 +- src/test/resources/testGridFiles/types/ev_type_input.csv | 2 +- src/test/resources/testGridFiles/types/hp_type_input.csv | 2 +- 
src/test/resources/testGridFiles/types/line_type_input.csv | 2 +- src/test/resources/testGridFiles/types/operator_input.csv | 2 +- .../resources/testGridFiles/types/storage_type_input.csv | 2 +- .../testGridFiles/types/transformer2w_type_input.csv | 2 +- .../testGridFiles/types/transformer3w_type_input.csv | 2 +- src/test/resources/testGridFiles/types/wec_type_input.csv | 4 ++-- 45 files changed, 64 insertions(+), 63 deletions(-) diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index fdbca3f26..ab2bcc470 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -48,6 +48,7 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { @Shared String testBaseFolderPath = "test" + // called automatically by spock (see http://spockframework.org/spock/docs/1.0/spock_primer.html - Fixture Methods) def cleanup() { // delete files after each test if they exist if (new File(testBaseFolderPath).exists()) { diff --git a/src/test/resources/testGridFiles/graphics/line_graphic_input.csv b/src/test/resources/testGridFiles/graphics/line_graphic_input.csv index e8787b7f5..3c20c2a0f 100644 --- a/src/test/resources/testGridFiles/graphics/line_graphic_input.csv +++ b/src/test/resources/testGridFiles/graphics/line_graphic_input.csv @@ -1,2 +1,2 @@ -"uuid","graphic_layer","line","path" -ece86139-3238-4a35-9361-457ecb4258b0,main,91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,{"type":"LineString","coordinates":[[0.0,0.0],[0.0,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} +uuid,graphic_layer,line,path +ece86139-3238-4a35-9361-457ecb4258b0,main,91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,"{""type"":""LineString"",""coordinates"":[[0.0,0.0],[0.0,10]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}" diff --git 
a/src/test/resources/testGridFiles/graphics/node_graphic_input.csv b/src/test/resources/testGridFiles/graphics/node_graphic_input.csv index 3230663dc..82f7a04dc 100644 --- a/src/test/resources/testGridFiles/graphics/node_graphic_input.csv +++ b/src/test/resources/testGridFiles/graphics/node_graphic_input.csv @@ -1,3 +1,3 @@ -"uuid","graphic_layer","node","path","point" -09aec636-791b-45aa-b981-b14edf171c4c,main,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,,{"type":"Point","coordinates":[0.0,10],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} -9ecad435-bd16-4797-a732-762c09d4af25,main,6e0980e0-10f2-4e18-862b-eb2b7c90509b,{"type":"LineString","coordinates":[[-1,0.0],[1,0.0]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, +uuid,graphic_layer,node,path,point +09aec636-791b-45aa-b981-b14edf171c4c,main,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,,"{""type"":""Point"",""coordinates"":[0.0,10],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}" +9ecad435-bd16-4797-a732-762c09d4af25,main,6e0980e0-10f2-4e18-862b-eb2b7c90509b,"{""type"":""LineString"",""coordinates"":[[-1,0.0],[1,0.0]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}", diff --git a/src/test/resources/testGridFiles/grid/line_input.csv b/src/test/resources/testGridFiles/grid/line_input.csv index aa129c0ae..c5f849c5c 100644 --- a/src/test/resources/testGridFiles/grid/line_input.csv +++ b/src/test/resources/testGridFiles/grid/line_input.csv @@ -1,3 +1,3 @@ -"uuid","geo_position","id","length","node_a","node_b","olm_characteristic","operates_from","operates_until","operator","parallel_devices","type" 
-92ec3bcf-1777-4d38-af67-0bf7c9fa73c7,{"type":"LineString","coordinates":[[7.411111,51.492528],[7.414116,51.484136]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}},test_line_AtoB,0.003,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 -91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,{"type":"LineString","coordinates":[[7.411111,51.492528],[7.414116,51.484136]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}},test_line_CtoD,0.003,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,6e0980e0-10f2-4e18-862b-eb2b7c90509b,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 \ No newline at end of file +uuid,geo_position,id,length,node_a,node_b,olm_characteristic,operates_from,operates_until,operator,parallel_devices,type +92ec3bcf-1777-4d38-af67-0bf7c9fa73c7,"{""type"":""LineString"",""coordinates"":[[7.411111,51.492528],[7.414116,51.484136]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",test_line_AtoB,0.003,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 +91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,"{""type"":""LineString"",""coordinates"":[[7.411111,51.492528],[7.414116,51.484136]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",test_line_CtoD,0.003,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,6e0980e0-10f2-4e18-862b-eb2b7c90509b,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 \ No newline at end of file diff --git a/src/test/resources/testGridFiles/grid/measurement_unit_input.csv 
b/src/test/resources/testGridFiles/grid/measurement_unit_input.csv index 2b3b231ce..d9e432af9 100644 --- a/src/test/resources/testGridFiles/grid/measurement_unit_input.csv +++ b/src/test/resources/testGridFiles/grid/measurement_unit_input.csv @@ -1,2 +1,2 @@ -"uuid","v_ang","v_mag","id","node","operates_from","operates_until","operator","p","q" +uuid,v_ang,v_mag,id,node,operates_from,operates_until,operator,p,q ce6119e3-f725-4166-b6e0-59f62e0c293d,true,true,test_measurementUnit,aaa74c1a-d07e-4615-99a5-e991f1d81cc4,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,true diff --git a/src/test/resources/testGridFiles/grid/node_input.csv b/src/test/resources/testGridFiles/grid/node_input.csv index fa1467fc1..8b2b387e5 100644 --- a/src/test/resources/testGridFiles/grid/node_input.csv +++ b/src/test/resources/testGridFiles/grid/node_input.csv @@ -1,5 +1,5 @@ -"uuid","geo_position","id","operates_from","operates_until","operator","slack","subnet","v_rated","v_target","volt_lvl" -4ca90220-74c2-4369-9afa-a18bf068840d,{"type":"Point","coordinates":[7.411111,51.492528],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}},node_a,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,1,380.0,1.0,Höchstspannung +uuid,geo_position,id,operates_from,operates_until,operator,slack,subnet,v_rated,v_target,volt_lvl +4ca90220-74c2-4369-9afa-a18bf068840d,"{""type"":""Point"",""coordinates"":[7.411111,51.492528],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",node_a,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,1,380.0,1.0,Höchstspannung 47d29df0-ba2d-4d23-8e75-c82229c5c758,,node_b,,,,false,2,110.0,1.0,Hochspannung bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,,node_c,,,,false,3,20.0,1.0,Mittelspannung 6e0980e0-10f2-4e18-862b-eb2b7c90509b,,node_d,,,,false,4,20.0,1.0,Mittelspannung diff --git 
a/src/test/resources/testGridFiles/grid/switch_input.csv b/src/test/resources/testGridFiles/grid/switch_input.csv index 3e1b03feb..af37806f6 100644 --- a/src/test/resources/testGridFiles/grid/switch_input.csv +++ b/src/test/resources/testGridFiles/grid/switch_input.csv @@ -1,2 +1,2 @@ -"uuid","closed","id","node_a","node_b","operates_from","operates_until","operator" +uuid,closed,id,node_a,node_b,operates_from,operates_until,operator 5dc88077-aeb6-4711-9142-db57287640b1,true,test_switch_AtoB,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92 diff --git a/src/test/resources/testGridFiles/grid/transformer2w_input.csv b/src/test/resources/testGridFiles/grid/transformer2w_input.csv index 30973d9ab..564775304 100644 --- a/src/test/resources/testGridFiles/grid/transformer2w_input.csv +++ b/src/test/resources/testGridFiles/grid/transformer2w_input.csv @@ -1,4 +1,4 @@ -"uuid","auto_tap","id","node_a","node_b","operates_from","operates_until","operator","parallel_devices","tap_pos","type" +uuid,auto_tap,id,node_a,node_b,operates_from,operates_until,operator,parallel_devices,tap_pos,type 58247de7-e297-4d9b-a5e4-b662c058c655,true,2w_single_test,47d29df0-ba2d-4d23-8e75-c82229c5c758,6e0980e0-10f2-4e18-862b-eb2b7c90509b,,,,1,0,202069a7-bcf8-422c-837c-273575220c8a 8542bfa5-dc34-4367-b549-e9f515e6cced,true,2w_v_1,47d29df0-ba2d-4d23-8e75-c82229c5c758,98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,,,1,0,ac30443b-29e7-4635-b399-1062cfb3ffda 0c03391d-47e1-49b3-9c9c-1616258e78a7,true,2w_v_2,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,,,1,0,8441dd78-c528-4e63-830d-52d341131432 diff --git a/src/test/resources/testGridFiles/grid/transformer3w_input.csv b/src/test/resources/testGridFiles/grid/transformer3w_input.csv index 6f50ab0f4..b970ceefb 100644 --- a/src/test/resources/testGridFiles/grid/transformer3w_input.csv +++ 
b/src/test/resources/testGridFiles/grid/transformer3w_input.csv @@ -1,2 +1,2 @@ -"uuid","auto_tap","id","node_a","node_b","node_c","operates_from","operates_until","operator","parallel_devices","tap_pos","type" +uuid,auto_tap,id,node_a,node_b,node_c,operates_from,operates_until,operator,parallel_devices,tap_pos,type cc327469-7d56-472b-a0df-edbb64f90e8f,true,3w_test,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,1,0,5b0ee546-21fb-4a7f-a801-5dbd3d7bb356 diff --git a/src/test/resources/testGridFiles/grid_empty/line_input.csv b/src/test/resources/testGridFiles/grid_empty/line_input.csv index 988018ac2..a4a8b4636 100644 --- a/src/test/resources/testGridFiles/grid_empty/line_input.csv +++ b/src/test/resources/testGridFiles/grid_empty/line_input.csv @@ -1 +1 @@ -"uuid","geo_position","id","length","node_a","node_b","olm_characteristic","operates_from","operates_until","operator","parallel_devices","type" \ No newline at end of file +uuid,geo_position,id,length,node_a,node_b,olm_characteristic,operates_from,operates_until,operator,parallel_devices,type \ No newline at end of file diff --git a/src/test/resources/testGridFiles/grid_empty/measurement_unit_input.csv b/src/test/resources/testGridFiles/grid_empty/measurement_unit_input.csv index 49a77a9a2..1e0a32d55 100644 --- a/src/test/resources/testGridFiles/grid_empty/measurement_unit_input.csv +++ b/src/test/resources/testGridFiles/grid_empty/measurement_unit_input.csv @@ -1 +1 @@ -"uuid","v_ang","v_mag","id","node","operates_from","operates_until","operator","p","q" +uuid,v_ang,v_mag,id,node,operates_from,operates_until,operator,p,q diff --git a/src/test/resources/testGridFiles/grid_empty/node_input.csv b/src/test/resources/testGridFiles/grid_empty/node_input.csv index 3cd04c530..c34b36d08 100644 --- a/src/test/resources/testGridFiles/grid_empty/node_input.csv +++ 
b/src/test/resources/testGridFiles/grid_empty/node_input.csv @@ -1 +1 @@ -"uuid","geo_position","id","operates_from","operates_until","operator","slack","subnet","v_rated","v_target","volt_lvl" +uuid,geo_position,id,operates_from,operates_until,operator,slack,subnet,v_rated,v_target,volt_lvl diff --git a/src/test/resources/testGridFiles/grid_empty/switch_input.csv b/src/test/resources/testGridFiles/grid_empty/switch_input.csv index 5f434403c..e62871fbb 100644 --- a/src/test/resources/testGridFiles/grid_empty/switch_input.csv +++ b/src/test/resources/testGridFiles/grid_empty/switch_input.csv @@ -1 +1 @@ -"uuid","closed","id","node_a","node_b","operates_from","operates_until","operator" +uuid,closed,id,node_a,node_b,operates_from,operates_until,operator diff --git a/src/test/resources/testGridFiles/grid_empty/transformer2w_input.csv b/src/test/resources/testGridFiles/grid_empty/transformer2w_input.csv index a6563c844..1f442d241 100644 --- a/src/test/resources/testGridFiles/grid_empty/transformer2w_input.csv +++ b/src/test/resources/testGridFiles/grid_empty/transformer2w_input.csv @@ -1 +1 @@ -"uuid","auto_tap","id","node_a","node_b","operates_from","operates_until","operator","parallel_devices","tap_pos","type" +uuid,auto_tap,id,node_a,node_b,operates_from,operates_until,operator,parallel_devices,tap_pos,type diff --git a/src/test/resources/testGridFiles/grid_empty/transformer3w_input.csv b/src/test/resources/testGridFiles/grid_empty/transformer3w_input.csv index bdc3fa827..2182afdd3 100644 --- a/src/test/resources/testGridFiles/grid_empty/transformer3w_input.csv +++ b/src/test/resources/testGridFiles/grid_empty/transformer3w_input.csv @@ -1 +1 @@ -"uuid","auto_tap","id","node_a","node_b","node_c","operates_from","operates_until","operator","parallel_devices","tap_pos","type" +uuid,auto_tap,id,node_a,node_b,node_c,operates_from,operates_until,operator,parallel_devices,tap_pos,type diff --git a/src/test/resources/testGridFiles/grid_malformed/line_input.csv 
b/src/test/resources/testGridFiles/grid_malformed/line_input.csv index aa129c0ae..c5f849c5c 100644 --- a/src/test/resources/testGridFiles/grid_malformed/line_input.csv +++ b/src/test/resources/testGridFiles/grid_malformed/line_input.csv @@ -1,3 +1,3 @@ -"uuid","geo_position","id","length","node_a","node_b","olm_characteristic","operates_from","operates_until","operator","parallel_devices","type" -92ec3bcf-1777-4d38-af67-0bf7c9fa73c7,{"type":"LineString","coordinates":[[7.411111,51.492528],[7.414116,51.484136]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}},test_line_AtoB,0.003,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 -91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,{"type":"LineString","coordinates":[[7.411111,51.492528],[7.414116,51.484136]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}},test_line_CtoD,0.003,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,6e0980e0-10f2-4e18-862b-eb2b7c90509b,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 \ No newline at end of file +uuid,geo_position,id,length,node_a,node_b,olm_characteristic,operates_from,operates_until,operator,parallel_devices,type +92ec3bcf-1777-4d38-af67-0bf7c9fa73c7,"{""type"":""LineString"",""coordinates"":[[7.411111,51.492528],[7.414116,51.484136]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",test_line_AtoB,0.003,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 
+91ec3bcf-1777-4d38-af67-0bf7c9fa73c7,"{""type"":""LineString"",""coordinates"":[[7.411111,51.492528],[7.414116,51.484136]],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",test_line_CtoD,0.003,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,6e0980e0-10f2-4e18-862b-eb2b7c90509b,olm:{(0.00,1.00)},2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,2,3bed3eb3-9790-4874-89b5-a5434d408088 \ No newline at end of file diff --git a/src/test/resources/testGridFiles/grid_malformed/measurement_unit_input.csv b/src/test/resources/testGridFiles/grid_malformed/measurement_unit_input.csv index 2b3b231ce..d9e432af9 100644 --- a/src/test/resources/testGridFiles/grid_malformed/measurement_unit_input.csv +++ b/src/test/resources/testGridFiles/grid_malformed/measurement_unit_input.csv @@ -1,2 +1,2 @@ -"uuid","v_ang","v_mag","id","node","operates_from","operates_until","operator","p","q" +uuid,v_ang,v_mag,id,node,operates_from,operates_until,operator,p,q ce6119e3-f725-4166-b6e0-59f62e0c293d,true,true,test_measurementUnit,aaa74c1a-d07e-4615-99a5-e991f1d81cc4,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,true diff --git a/src/test/resources/testGridFiles/grid_malformed/node_input.csv b/src/test/resources/testGridFiles/grid_malformed/node_input.csv index d13a31d76..6a9f52e29 100644 --- a/src/test/resources/testGridFiles/grid_malformed/node_input.csv +++ b/src/test/resources/testGridFiles/grid_malformed/node_input.csv @@ -1,5 +1,5 @@ -"uuid","geo_position","id","operates_from","operates_until","operator","slack","subnet","v_rated","v_target","volt_lvl" -4ca90220-74c2-4369-9afa-a18bf068840d,{"type":"Point","coordinates":[7.411111,51.492528],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}},node_a,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,1,380.0,1.0,Höchstspannung 
+uuid,geo_position,id,operates_from,operates_until,operator,slack,subnet,v_rated,v_target,volt_lvl +4ca90220-74c2-4369-9afa-a18bf068840d,"{""type"":""Point"",""coordinates"":[7.411111,51.492528],""crs"":{""type"":""name"",""properties"":{""name"":""EPSG:4326""}}}",node_a,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,true,1,380.0,1.0,Höchstspannung 47d29df0-ba2d-4d23-8e75-c82229c5c758,,node_b,,,,false,2,110.0,1.0,Hochspannung bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,,node_c,,,,false,3,20.0,1.0,Mittelspannung 98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,node_e,,,,false,5,10.0,1.0,Mittelspannung diff --git a/src/test/resources/testGridFiles/grid_malformed/switch_input.csv b/src/test/resources/testGridFiles/grid_malformed/switch_input.csv index 3e1b03feb..af37806f6 100644 --- a/src/test/resources/testGridFiles/grid_malformed/switch_input.csv +++ b/src/test/resources/testGridFiles/grid_malformed/switch_input.csv @@ -1,2 +1,2 @@ -"uuid","closed","id","node_a","node_b","operates_from","operates_until","operator" +uuid,closed,id,node_a,node_b,operates_from,operates_until,operator 5dc88077-aeb6-4711-9142-db57287640b1,true,test_switch_AtoB,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92 diff --git a/src/test/resources/testGridFiles/grid_malformed/transformer2w_input.csv b/src/test/resources/testGridFiles/grid_malformed/transformer2w_input.csv index 30973d9ab..564775304 100644 --- a/src/test/resources/testGridFiles/grid_malformed/transformer2w_input.csv +++ b/src/test/resources/testGridFiles/grid_malformed/transformer2w_input.csv @@ -1,4 +1,4 @@ -"uuid","auto_tap","id","node_a","node_b","operates_from","operates_until","operator","parallel_devices","tap_pos","type" +uuid,auto_tap,id,node_a,node_b,operates_from,operates_until,operator,parallel_devices,tap_pos,type 
58247de7-e297-4d9b-a5e4-b662c058c655,true,2w_single_test,47d29df0-ba2d-4d23-8e75-c82229c5c758,6e0980e0-10f2-4e18-862b-eb2b7c90509b,,,,1,0,202069a7-bcf8-422c-837c-273575220c8a 8542bfa5-dc34-4367-b549-e9f515e6cced,true,2w_v_1,47d29df0-ba2d-4d23-8e75-c82229c5c758,98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,,,1,0,ac30443b-29e7-4635-b399-1062cfb3ffda 0c03391d-47e1-49b3-9c9c-1616258e78a7,true,2w_v_2,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,98a3e7fa-c456-455b-a5ea-bb19e7cbeb63,,,,1,0,8441dd78-c528-4e63-830d-52d341131432 diff --git a/src/test/resources/testGridFiles/grid_malformed/transformer3w_input.csv b/src/test/resources/testGridFiles/grid_malformed/transformer3w_input.csv index 6f50ab0f4..b970ceefb 100644 --- a/src/test/resources/testGridFiles/grid_malformed/transformer3w_input.csv +++ b/src/test/resources/testGridFiles/grid_malformed/transformer3w_input.csv @@ -1,2 +1,2 @@ -"uuid","auto_tap","id","node_a","node_b","node_c","operates_from","operates_until","operator","parallel_devices","tap_pos","type" +uuid,auto_tap,id,node_a,node_b,node_c,operates_from,operates_until,operator,parallel_devices,tap_pos,type cc327469-7d56-472b-a0df-edbb64f90e8f,true,3w_test,4ca90220-74c2-4369-9afa-a18bf068840d,47d29df0-ba2d-4d23-8e75-c82229c5c758,bd837a25-58f3-44ac-aa90-c6b6e3cd91b2,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],f15105c4-a2de-4ab8-a621-4bc98e372d92,1,0,5b0ee546-21fb-4a7f-a801-5dbd3d7bb356 diff --git a/src/test/resources/testGridFiles/participants/bm_input.csv b/src/test/resources/testGridFiles/participants/bm_input.csv index 1eb38253e..ce7d8b2b6 100644 --- a/src/test/resources/testGridFiles/participants/bm_input.csv +++ b/src/test/resources/testGridFiles/participants/bm_input.csv @@ -1,2 +1,2 @@ -"uuid","cost_controlled","feed_in_tariff","id","market_reaction","node","operates_from","operates_until","operator","q_characteristics","type" 
-d06e5bb7-a3c7-4749-bdd1-4581ff2f6f4d,false,10.0,test_bmInput,false,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,qV:{(0.90,-0.30),(0.95,0.00),(1.05,0.00),(1.10,0.30)},5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 +uuid,cost_controlled,feed_in_tariff,id,market_reaction,node,operates_from,operates_until,operator,q_characteristics,type +d06e5bb7-a3c7-4749-bdd1-4581ff2f6f4d,false,10.0,test_bmInput,false,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"qV:{(0.90,-0.30),(0.95,0.00),(1.05,0.00),(1.10,0.30)}",5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 diff --git a/src/test/resources/testGridFiles/participants/chp_input.csv b/src/test/resources/testGridFiles/participants/chp_input.csv index 55ee77721..71a873f1a 100644 --- a/src/test/resources/testGridFiles/participants/chp_input.csv +++ b/src/test/resources/testGridFiles/participants/chp_input.csv @@ -1,2 +1,2 @@ -"uuid","id","market_reaction","node","operates_from","operates_until","operator","q_characteristics","thermal_bus","thermal_storage","type" -9981b4d7-5a8e-4909-9602-e2e7ef4fca5c,test_chpInput,false,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,cosPhiFixed:{(0.00,0.95)},0d95d7f2-49fb-4d49-8636-383a5220384e,8851813b-3a7d-4fee-874b-4df9d724e4b3,5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 +uuid,id,market_reaction,node,operates_from,operates_until,operator,q_characteristics,thermal_bus,thermal_storage,type +9981b4d7-5a8e-4909-9602-e2e7ef4fca5c,test_chpInput,false,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"cosPhiFixed:{(0.00,0.95)}",0d95d7f2-49fb-4d49-8636-383a5220384e,8851813b-3a7d-4fee-874b-4df9d724e4b3,5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 diff --git 
a/src/test/resources/testGridFiles/participants/cylindrical_storage_input.csv b/src/test/resources/testGridFiles/participants/cylindrical_storage_input.csv index e90b5d160..7d9de239a 100644 --- a/src/test/resources/testGridFiles/participants/cylindrical_storage_input.csv +++ b/src/test/resources/testGridFiles/participants/cylindrical_storage_input.csv @@ -1,2 +1,2 @@ -"uuid","c","id","inlet_temp","operates_from","operates_until","operator","return_temp","storage_volume_lvl","storage_volume_lvl_min","thermal_bus" +uuid,c,id,inlet_temp,operates_from,operates_until,operator,return_temp,storage_volume_lvl,storage_volume_lvl_min,thermal_bus 8851813b-3a7d-4fee-874b-4df9d724e4b3,1.0,test_cylindricThermalStorage,110.0,,,7d6f1763-0c1d-4266-a76f-59163ad3808b,80.0,1.039154027,0.3,0d95d7f2-49fb-4d49-8636-383a5220384e diff --git a/src/test/resources/testGridFiles/participants/ev_input.csv b/src/test/resources/testGridFiles/participants/ev_input.csv index bcc850e0d..586c162ca 100644 --- a/src/test/resources/testGridFiles/participants/ev_input.csv +++ b/src/test/resources/testGridFiles/participants/ev_input.csv @@ -1,2 +1,2 @@ -"uuid","id","node","operates_from","operates_until","operator","q_characteristics","type" -a17be20f-c7a7-471d-8ffe-015487c9d022,test_evInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,cosPhiFixed:{(0.00,0.95)},5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 +uuid,id,node,operates_from,operates_until,operator,q_characteristics,type +a17be20f-c7a7-471d-8ffe-015487c9d022,test_evInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"cosPhiFixed:{(0.00,0.95)}",5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 diff --git a/src/test/resources/testGridFiles/participants/fixed_feed_in_input.csv b/src/test/resources/testGridFiles/participants/fixed_feed_in_input.csv index abcc51d41..0cf4a3ec4 100644 --- 
a/src/test/resources/testGridFiles/participants/fixed_feed_in_input.csv +++ b/src/test/resources/testGridFiles/participants/fixed_feed_in_input.csv @@ -1,2 +1,2 @@ -"uuid","cosphi_rated","id","node","operates_from","operates_until","operator","q_characteristics","s_rated" -717af017-cc69-406f-b452-e022d7fb516a,0.95,test_fixedFeedInInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,cosPhiFixed:{(0.00,0.95)},25.0 +uuid,cosphi_rated,id,node,operates_from,operates_until,operator,q_characteristics,s_rated +717af017-cc69-406f-b452-e022d7fb516a,0.95,test_fixedFeedInInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"cosPhiFixed:{(0.00,0.95)}",25.0 diff --git a/src/test/resources/testGridFiles/participants/hp_input.csv b/src/test/resources/testGridFiles/participants/hp_input.csv index 276da655d..d99bf92b8 100644 --- a/src/test/resources/testGridFiles/participants/hp_input.csv +++ b/src/test/resources/testGridFiles/participants/hp_input.csv @@ -1,2 +1,2 @@ -"uuid","id","node","operates_from","operates_until","operator","q_characteristics","thermal_bus","type" -798028b5-caff-4da7-bcd9-1750fdd8742b,test_hpInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,cosPhiFixed:{(0.00,0.95)},0d95d7f2-49fb-4d49-8636-383a5220384e,5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 +uuid,id,node,operates_from,operates_until,operator,q_characteristics,thermal_bus,type +798028b5-caff-4da7-bcd9-1750fdd8742b,test_hpInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"cosPhiFixed:{(0.00,0.95)}",0d95d7f2-49fb-4d49-8636-383a5220384e,5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 diff --git a/src/test/resources/testGridFiles/participants/load_input.csv 
b/src/test/resources/testGridFiles/participants/load_input.csv index 119e5af0d..749944c74 100644 --- a/src/test/resources/testGridFiles/participants/load_input.csv +++ b/src/test/resources/testGridFiles/participants/load_input.csv @@ -1,2 +1,2 @@ -"uuid","cosphi_rated","dsm","e_cons_annual","id","node","operates_from","operates_until","operator","q_characteristics","s_rated","standard_load_profile" -eaf77f7e-9001-479f-94ca-7fb657766f5f,0.95,false,4000.0,test_loadInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,cosPhiFixed:{(0.00,0.95)},25.0,h0 +uuid,cosphi_rated,dsm,e_cons_annual,id,node,operates_from,operates_until,operator,q_characteristics,s_rated,standard_load_profile +eaf77f7e-9001-479f-94ca-7fb657766f5f,0.95,false,4000.0,test_loadInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"cosPhiFixed:{(0.00,0.95)}",25.0,h0 diff --git a/src/test/resources/testGridFiles/participants/pv_input.csv b/src/test/resources/testGridFiles/participants/pv_input.csv index b7db1c62d..51f1a8d20 100644 --- a/src/test/resources/testGridFiles/participants/pv_input.csv +++ b/src/test/resources/testGridFiles/participants/pv_input.csv @@ -1,2 +1,2 @@ -"uuid","albedo","azimuth","cosphi_rated","eta_conv","height","id","k_g","k_t","market_reaction","node","operates_from","operates_until","operator","q_characteristics","s_rated" -d56f15b7-8293-4b98-b5bd-58f6273ce229,0.20000000298023224,-8.926613807678223,0.95,98.0,41.01871871948242,test_pvInput,0.8999999761581421,1.0,false,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,cosPhiFixed:{(0.00,0.95)},25.0 +uuid,albedo,azimuth,cosphi_rated,eta_conv,height,id,k_g,k_t,market_reaction,node,operates_from,operates_until,operator,q_characteristics,s_rated 
+d56f15b7-8293-4b98-b5bd-58f6273ce229,0.20000000298023224,-8.926613807678223,0.95,98.0,41.01871871948242,test_pvInput,0.8999999761581421,1.0,false,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"cosPhiFixed:{(0.00,0.95)}",25.0 diff --git a/src/test/resources/testGridFiles/participants/storage_input.csv b/src/test/resources/testGridFiles/participants/storage_input.csv index 59b42a955..82018665a 100644 --- a/src/test/resources/testGridFiles/participants/storage_input.csv +++ b/src/test/resources/testGridFiles/participants/storage_input.csv @@ -1,2 +1,2 @@ -"uuid","behaviour","id","node","operates_from","operates_until","operator","q_characteristics","type" -06b58276-8350-40fb-86c0-2414aa4a0452,market,test_storageInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,cosPhiFixed:{(0.00,0.95)},5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 +uuid,behaviour,id,node,operates_from,operates_until,operator,q_characteristics,type +06b58276-8350-40fb-86c0-2414aa4a0452,market,test_storageInput,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"cosPhiFixed:{(0.00,0.95)}",5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 diff --git a/src/test/resources/testGridFiles/participants/thermal_bus_input.csv b/src/test/resources/testGridFiles/participants/thermal_bus_input.csv index e934eb0fc..3455049ec 100644 --- a/src/test/resources/testGridFiles/participants/thermal_bus_input.csv +++ b/src/test/resources/testGridFiles/participants/thermal_bus_input.csv @@ -1,2 +1,2 @@ -"uuid","id","operates_from","operates_until","operator" +uuid,id,operates_from,operates_until,operator 0d95d7f2-49fb-4d49-8636-383a5220384e,test_thermalBusInput,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510 diff --git 
a/src/test/resources/testGridFiles/participants/wec_input.csv b/src/test/resources/testGridFiles/participants/wec_input.csv index 2f74f4666..28b063b88 100644 --- a/src/test/resources/testGridFiles/participants/wec_input.csv +++ b/src/test/resources/testGridFiles/participants/wec_input.csv @@ -1,2 +1,2 @@ -"uuid","id","market_reaction","node","operates_from","operates_until","operator","q_characteristics","type" -ee7e2e37-a5ad-4def-a832-26a317567ca1,test_wecInput,false,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,cosPhiP:{(0.00,1.00),(0.90,1.00),(1.20,-0.30)},5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 +uuid,id,market_reaction,node,operates_from,operates_until,operator,q_characteristics,type +ee7e2e37-a5ad-4def-a832-26a317567ca1,test_wecInput,false,4ca90220-74c2-4369-9afa-a18bf068840d,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,"cosPhiP:{(0.00,1.00),(0.90,1.00),(1.20,-0.30)}",5ebd8f7e-dedb-4017-bb86-6373c4b68eb8 diff --git a/src/test/resources/testGridFiles/thermal/cylindrical_storage_input.csv b/src/test/resources/testGridFiles/thermal/cylindrical_storage_input.csv index e90b5d160..7d9de239a 100644 --- a/src/test/resources/testGridFiles/thermal/cylindrical_storage_input.csv +++ b/src/test/resources/testGridFiles/thermal/cylindrical_storage_input.csv @@ -1,2 +1,2 @@ -"uuid","c","id","inlet_temp","operates_from","operates_until","operator","return_temp","storage_volume_lvl","storage_volume_lvl_min","thermal_bus" +uuid,c,id,inlet_temp,operates_from,operates_until,operator,return_temp,storage_volume_lvl,storage_volume_lvl_min,thermal_bus 8851813b-3a7d-4fee-874b-4df9d724e4b3,1.0,test_cylindricThermalStorage,110.0,,,7d6f1763-0c1d-4266-a76f-59163ad3808b,80.0,1.039154027,0.3,0d95d7f2-49fb-4d49-8636-383a5220384e diff --git a/src/test/resources/testGridFiles/thermal/thermal_bus_input.csv b/src/test/resources/testGridFiles/thermal/thermal_bus_input.csv index 
e934eb0fc..3455049ec 100644 --- a/src/test/resources/testGridFiles/thermal/thermal_bus_input.csv +++ b/src/test/resources/testGridFiles/thermal/thermal_bus_input.csv @@ -1,2 +1,2 @@ -"uuid","id","operates_from","operates_until","operator" +uuid,id,operates_from,operates_until,operator 0d95d7f2-49fb-4d49-8636-383a5220384e,test_thermalBusInput,2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510 diff --git a/src/test/resources/testGridFiles/thermal/thermal_house_input.csv b/src/test/resources/testGridFiles/thermal/thermal_house_input.csv index 8520f2329..f5ea59569 100644 --- a/src/test/resources/testGridFiles/thermal/thermal_house_input.csv +++ b/src/test/resources/testGridFiles/thermal/thermal_house_input.csv @@ -1,2 +1,2 @@ -"uuid","id","operates_from","operates_until","operator","thermal_bus","eth_losses","eth_capa" +uuid,id,operates_from,operates_until,operator,thermal_bus,eth_losses,eth_capa 717af017-cc69-406f-b452-e022d7fb516a,"test_thermalHouseInput",2020-03-24T15:11:31Z[UTC],2020-03-25T15:11:31Z[UTC],8f9682df-0744-4b58-a122-f0dc730f6510,0d95d7f2-49fb-4d49-8636-383a5220384e,10,20 \ No newline at end of file diff --git a/src/test/resources/testGridFiles/types/bm_type_input.csv b/src/test/resources/testGridFiles/types/bm_type_input.csv index 7f1509598..742bbfb0e 100644 --- a/src/test/resources/testGridFiles/types/bm_type_input.csv +++ b/src/test/resources/testGridFiles/types/bm_type_input.csv @@ -1,2 +1,2 @@ -"uuid","active_power_gradient","capex","cosphi_rated","eta_conv","id","opex","s_rated" +uuid,active_power_gradient,capex,cosphi_rated,eta_conv,id,opex,s_rated 5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,25.0,100.0,0.95,98.0,test_bmTypeInput,50.0,25.0 diff --git a/src/test/resources/testGridFiles/types/chp_type_input.csv b/src/test/resources/testGridFiles/types/chp_type_input.csv index 91fd16803..4e19b862a 100644 --- a/src/test/resources/testGridFiles/types/chp_type_input.csv +++ 
b/src/test/resources/testGridFiles/types/chp_type_input.csv @@ -1,2 +1,2 @@ -"uuid","capex","cosphi_rated","eta_el","eta_thermal","id","opex","p_own","p_thermal","s_rated" +uuid,capex,cosphi_rated,eta_el,eta_thermal,id,opex,p_own,p_thermal,s_rated 5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,100.0,0.95,19.0,76.0,test_chpType,50.0,0.0,9.0,25.0 diff --git a/src/test/resources/testGridFiles/types/ev_type_input.csv b/src/test/resources/testGridFiles/types/ev_type_input.csv index bdc61032f..ab07e35fa 100644 --- a/src/test/resources/testGridFiles/types/ev_type_input.csv +++ b/src/test/resources/testGridFiles/types/ev_type_input.csv @@ -1,2 +1,2 @@ -"uuid","capex","cosphi_rated","e_cons","e_storage","id","opex","s_rated" +uuid,capex,cosphi_rated,e_cons,e_storage,id,opex,s_rated 5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,100.0,0.95,5.0,100.0,test_evTypeInput,50.0,25.0 diff --git a/src/test/resources/testGridFiles/types/hp_type_input.csv b/src/test/resources/testGridFiles/types/hp_type_input.csv index 083331c61..6f943036a 100644 --- a/src/test/resources/testGridFiles/types/hp_type_input.csv +++ b/src/test/resources/testGridFiles/types/hp_type_input.csv @@ -1,2 +1,2 @@ -"uuid","capex","cosphi_rated","id","opex","p_thermal","s_rated" +uuid,capex,cosphi_rated,id,opex,p_thermal,s_rated 5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,100.0,0.95,test_hpTypeInput,50.0,9.0,25.0 diff --git a/src/test/resources/testGridFiles/types/line_type_input.csv b/src/test/resources/testGridFiles/types/line_type_input.csv index 7d98b56e7..686900bf9 100644 --- a/src/test/resources/testGridFiles/types/line_type_input.csv +++ b/src/test/resources/testGridFiles/types/line_type_input.csv @@ -1,2 +1,2 @@ -"uuid","b","g","i_max","id","r","v_rated","x" +uuid,b,g,i_max,id,r,v_rated,x 3bed3eb3-9790-4874-89b5-a5434d408088,0.00322,0.0,300.0,lineType_AtoB,0.437,20.0,0.356 \ No newline at end of file diff --git a/src/test/resources/testGridFiles/types/operator_input.csv b/src/test/resources/testGridFiles/types/operator_input.csv 
index bfd876318..89506dcfd 100644 --- a/src/test/resources/testGridFiles/types/operator_input.csv +++ b/src/test/resources/testGridFiles/types/operator_input.csv @@ -1,3 +1,3 @@ -"uuid","id" +uuid,id 8f9682df-0744-4b58-a122-f0dc730f6510,TestOperator f15105c4-a2de-4ab8-a621-4bc98e372d92,Univ.-Prof. Dr. rer. hort. Klaus-Dieter Brokkoli \ No newline at end of file diff --git a/src/test/resources/testGridFiles/types/storage_type_input.csv b/src/test/resources/testGridFiles/types/storage_type_input.csv index 90b73b87d..1c437b952 100644 --- a/src/test/resources/testGridFiles/types/storage_type_input.csv +++ b/src/test/resources/testGridFiles/types/storage_type_input.csv @@ -1,2 +1,2 @@ -"uuid","active_power_gradient","capex","cosphi_rated","dod","e_storage","eta","id","life_cycle","life_time","opex","p_max","s_rated" +uuid,active_power_gradient,capex,cosphi_rated,dod,e_storage,eta,id,life_cycle,life_time,opex,p_max,s_rated 5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,100.0,100.0,0.95,10.0,100.0,95.0,test_storageTypeInput,100,175316.4,50.0,15.0,25.0 diff --git a/src/test/resources/testGridFiles/types/transformer2w_type_input.csv b/src/test/resources/testGridFiles/types/transformer2w_type_input.csv index a1f6b5f71..841e2c926 100644 --- a/src/test/resources/testGridFiles/types/transformer2w_type_input.csv +++ b/src/test/resources/testGridFiles/types/transformer2w_type_input.csv @@ -1,4 +1,4 @@ -"uuid","b_m","d_phi","d_v","g_m","id","r_sc","s_rated","tap_max","tap_min","tap_neutr","tap_side","v_rated_a","v_rated_b","x_sc" +uuid,b_m,d_phi,d_v,g_m,id,r_sc,s_rated,tap_max,tap_min,tap_neutr,tap_side,v_rated_a,v_rated_b,x_sc 202069a7-bcf8-422c-837c-273575220c8a,0.0,0.0,1.5,0.0,HS-MS_1,45.375,20000.0,10,-10,0,false,110.0,20.0,102.759 ac30443b-29e7-4635-b399-1062cfb3ffda,0.0,0.0,1.777780055999756,0.0,transformer_type_gedfi89fc7c895076ff25ec6d3b2e7ab9a1b24b37f73ecf30f895005d766a8d8d2774aa,0.0,40000.0,19,1,10,false,110.0,10.0,51.72750115394592 
8441dd78-c528-4e63-830d-52d341131432,0.0,0.0,1.5,0.0,no_shunt_elements_mv-mv,1.5,250.0,5,-5,0,false,20.0,10.0,15.5 diff --git a/src/test/resources/testGridFiles/types/transformer3w_type_input.csv b/src/test/resources/testGridFiles/types/transformer3w_type_input.csv index bdeb0f8d7..bb078edac 100644 --- a/src/test/resources/testGridFiles/types/transformer3w_type_input.csv +++ b/src/test/resources/testGridFiles/types/transformer3w_type_input.csv @@ -1,2 +1,2 @@ -"uuid","b_m","d_phi","d_v","g_m","id","r_sc_a","r_sc_b","r_sc_c","s_rated_a","s_rated_b","s_rated_c","tap_max","tap_min","tap_neutr","v_rated_a","v_rated_b","v_rated_c","x_sc_a","x_sc_b","x_sc_c" +uuid,b_m,d_phi,d_v,g_m,id,r_sc_a,r_sc_b,r_sc_c,s_rated_a,s_rated_b,s_rated_c,tap_max,tap_min,tap_neutr,v_rated_a,v_rated_b,v_rated_c,x_sc_a,x_sc_b,x_sc_c 5b0ee546-21fb-4a7f-a801-5dbd3d7bb356,1000.0,0.0,1.5,40000.0,HöS-HS-MS_1,0.3,0.025,8.0E-4,120000.0,60000.0,40000.0,10,-10,0,380.0,110.0,20.0,1.0,0.08,0.003 diff --git a/src/test/resources/testGridFiles/types/wec_type_input.csv b/src/test/resources/testGridFiles/types/wec_type_input.csv index 005e601d5..43184d4e0 100644 --- a/src/test/resources/testGridFiles/types/wec_type_input.csv +++ b/src/test/resources/testGridFiles/types/wec_type_input.csv @@ -1,2 +1,2 @@ -"uuid","capex","cosphi_rated","cp_characteristic","eta_conv","hub_height","id","opex","rotor_area","s_rated" -5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,100.0,0.95,cP:{(10.00,0.05),(15.00,0.10),(20.00,0.20)},98.0,200.0,test_wecType,50.0,20.0,25.0 +uuid,capex,cosphi_rated,cp_characteristic,eta_conv,hub_height,id,opex,rotor_area,s_rated +5ebd8f7e-dedb-4017-bb86-6373c4b68eb8,100.0,0.95,"cP:{(10.00,0.05),(15.00,0.10),(20.00,0.20)}",98.0,200.0,test_wecType,50.0,20.0,25.0 From 72656697e1fb57bb249ff7dd04e2c62c6731d996 Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 21:35:23 +0200 Subject: [PATCH 28/30] addressing PMD issue in BufferedCsvWriter --- 
src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java index a9d1d94c6..3fe767bf5 100644 --- a/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java +++ b/src/main/java/edu/ie3/datamodel/io/csv/BufferedCsvWriter.java @@ -68,7 +68,7 @@ public void write(Map entityFieldData) throws IOException, SinkE * * @throws IOException If something is messed up */ - private void writeFileHeader(String[] headLineElements) throws IOException { + protected final void writeFileHeader(String[] headLineElements) throws IOException { writeOneLine(StringUtils.camelCaseToSnakeCase(headLineElements)); } @@ -78,7 +78,7 @@ private void writeFileHeader(String[] headLineElements) throws IOException { * @param entries Entries to write to the line of the file * @throws IOException If writing is not possible */ - private void writeOneLine(String[] entries) throws IOException { + protected final void writeOneLine(String[] entries) throws IOException { for (int i = 0; i < entries.length; i++) { String attribute = entries[i]; super.append(attribute); From f513a1ffd46e9e1bdb926430768d7029ae35935b Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 21:42:00 +0200 Subject: [PATCH 29/30] addressing error prone implementations --- .../groovy/edu/ie3/test/common/ComplexTopology.groovy | 8 ++++---- src/test/groovy/edu/ie3/test/common/GridTestData.groovy | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy b/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy index 77361fe99..4b071c094 100644 --- a/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy +++ b/src/test/groovy/edu/ie3/test/common/ComplexTopology.groovy @@ -17,7 +17,7 @@ import org.jgrapht.graph.DirectedMultigraph import 
org.jgrapht.graph.SimpleDirectedGraph class ComplexTopology extends GridTestData { - public static gridName = "complex_topology" + public static final gridName = "complex_topology" private static final RawGridElements rawGrid = new RawGridElements( [ @@ -39,7 +39,7 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set) - public static grid = new JointGridContainer( + public static final grid = new JointGridContainer( gridName, rawGrid, new SystemParticipants( @@ -57,9 +57,9 @@ class ComplexTopology extends GridTestData { [] as Set, [] as Set)) - public static HashMap expectedSubGrids = new HashMap<>() + public static final HashMap expectedSubGrids = new HashMap<>() - public static SubGridTopologyGraph expectedSubGridTopology + public static final SubGridTopologyGraph expectedSubGridTopology static { expectedSubGrids.put(1, new SubGridContainer( diff --git a/src/test/groovy/edu/ie3/test/common/GridTestData.groovy b/src/test/groovy/edu/ie3/test/common/GridTestData.groovy index bb3c4f23f..3ac62ad80 100644 --- a/src/test/groovy/edu/ie3/test/common/GridTestData.groovy +++ b/src/test/groovy/edu/ie3/test/common/GridTestData.groovy @@ -299,7 +299,7 @@ class GridTestData { true ) - public static Transformer3WInput transformerAtoBtoC = new Transformer3WInput( + public static final Transformer3WInput transformerAtoBtoC = new Transformer3WInput( UUID.fromString("cc327469-7d56-472b-a0df-edbb64f90e8f"), "3w_test", profBroccoli, From a645e6289a2cbb59d5c946f956bec48cb7877d19 Mon Sep 17 00:00:00 2001 From: Johannes Hiry Date: Thu, 18 Jun 2020 21:45:52 +0200 Subject: [PATCH 30/30] minor code style adaptions --- .../input/InputEntityProcessorTest.groovy | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy index 4beb629b0..6f6937705 100644 --- 
a/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/processor/input/InputEntityProcessorTest.groovy @@ -210,22 +210,22 @@ class InputEntityProcessorTest extends Specification { "node" : SystemParticipantTestData.wecInput.node.uuid.toString(), "operatesUntil" : SystemParticipantTestData.wecInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), "operatesFrom" : SystemParticipantTestData.wecInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), - "operator" : SystemParticipantTestData.wecInput.operator.getUuid().toString(), + "operator" : SystemParticipantTestData.wecInput.operator.uuid.toString(), "qCharacteristics": SystemParticipantTestData.cosPhiPDeSerialized, - "type" : SystemParticipantTestData.wecInput.type.getUuid().toString() + "type" : SystemParticipantTestData.wecInput.type.uuid.toString() ] ChpInput | SystemParticipantTestData.chpInput || [ "uuid" : SystemParticipantTestData.chpInput.uuid.toString(), "id" : SystemParticipantTestData.chpInput.id, "marketReaction" : SystemParticipantTestData.chpInput.marketReaction.toString(), - "node" : SystemParticipantTestData.chpInput.node.getUuid().toString(), + "node" : SystemParticipantTestData.chpInput.node.uuid.toString(), "operatesUntil" : SystemParticipantTestData.chpInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), "operatesFrom" : SystemParticipantTestData.chpInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), - "operator" : SystemParticipantTestData.chpInput.operator.getUuid().toString(), + "operator" : SystemParticipantTestData.chpInput.operator.uuid.toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, - "thermalBus" : SystemParticipantTestData.chpInput.thermalBus.getUuid().toString(), - "thermalStorage" : SystemParticipantTestData.chpInput.thermalStorage.getUuid().toString(), - "type" : 
SystemParticipantTestData.chpInput.type.getUuid().toString(), + "thermalBus" : SystemParticipantTestData.chpInput.thermalBus.uuid.toString(), + "thermalStorage" : SystemParticipantTestData.chpInput.thermalStorage.uuid.toString(), + "type" : SystemParticipantTestData.chpInput.type.uuid.toString(), ] BmInput | SystemParticipantTestData.bmInput || [ "uuid" : SystemParticipantTestData.bmInput.uuid.toString(), @@ -236,9 +236,9 @@ class InputEntityProcessorTest extends Specification { "node" : SystemParticipantTestData.bmInput.node.uuid.toString(), "operatesUntil" : SystemParticipantTestData.bmInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), "operatesFrom" : SystemParticipantTestData.bmInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), - "operator" : SystemParticipantTestData.bmInput.operator.getUuid().toString(), + "operator" : SystemParticipantTestData.bmInput.operator.uuid.toString(), "qCharacteristics": SystemParticipantTestData.qVDeSerialized, - "type" : SystemParticipantTestData.bmInput.type.getUuid().toString() + "type" : SystemParticipantTestData.bmInput.type.uuid.toString() ] EvInput | SystemParticipantTestData.evInput || [ "uuid" : SystemParticipantTestData.evInput.uuid.toString(), @@ -260,7 +260,7 @@ class InputEntityProcessorTest extends Specification { "node" : SystemParticipantTestData.loadInput.node.uuid.toString(), "operatesUntil" : SystemParticipantTestData.loadInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), "operatesFrom" : SystemParticipantTestData.loadInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), - "operator" : SystemParticipantTestData.loadInput.operator.getUuid().toString(), + "operator" : SystemParticipantTestData.loadInput.operator.uuid.toString(), "qCharacteristics" : SystemParticipantTestData.cosPhiFixedDeSerialized, "sRated" : SystemParticipantTestData.loadInput.sRated.getValue().doubleValue().toString(), "standardLoadProfile": 
SystemParticipantTestData.loadInput.standardLoadProfile.key @@ -272,9 +272,9 @@ class InputEntityProcessorTest extends Specification { "node" : SystemParticipantTestData.storageInput.node.uuid.toString(), "operatesUntil" : SystemParticipantTestData.storageInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), "operatesFrom" : SystemParticipantTestData.storageInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), - "operator" : SystemParticipantTestData.storageInput.operator.getUuid().toString(), + "operator" : SystemParticipantTestData.storageInput.operator.uuid.toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, - "type" : SystemParticipantTestData.storageInput.type.getUuid().toString() + "type" : SystemParticipantTestData.storageInput.type.uuid.toString() ] HpInput | SystemParticipantTestData.hpInput || [ "uuid" : SystemParticipantTestData.hpInput.uuid.toString(), @@ -282,10 +282,10 @@ class InputEntityProcessorTest extends Specification { "node" : SystemParticipantTestData.hpInput.node.uuid.toString(), "operatesUntil" : SystemParticipantTestData.hpInput.operationTime.endDate.orElse(ZonedDateTime.now()).toString(), "operatesFrom" : SystemParticipantTestData.hpInput.operationTime.startDate.orElse(ZonedDateTime.now()).toString(), - "operator" : SystemParticipantTestData.hpInput.operator.getUuid().toString(), + "operator" : SystemParticipantTestData.hpInput.operator.uuid.toString(), "qCharacteristics": SystemParticipantTestData.cosPhiFixedDeSerialized, "thermalBus" : SystemParticipantTestData.hpInput.thermalBus.uuid.toString(), - "type" : SystemParticipantTestData.hpInput.type.getUuid().toString() + "type" : SystemParticipantTestData.hpInput.type.uuid.toString() ] }