Skip to content

Commit

Permalink
[HJwIMwqg] apoc.export.csv.graph incorrectly exports properties with …
Browse files Browse the repository at this point in the history
…datatype float (#403)

* [HJwIMwqg] apoc.export.csv.graph incorrectly exports properties with datatype float

* [HJwIMwqg] code clean

* [HJwIMwqg] added apoc.export.csv.data and changed fileName

* [HJwIMwqg] added comments and various tests
  • Loading branch information
vga91 committed Aug 31, 2023
1 parent 40e11e0 commit ae1d8be
Show file tree
Hide file tree
Showing 6 changed files with 382 additions and 71 deletions.
2 changes: 1 addition & 1 deletion common/src/main/java/apoc/export/util/BulkImportUtil.java
Expand Up @@ -28,7 +28,7 @@

public class BulkImportUtil {

private static Map<Class<?>, String> allowedMapping = Collections.unmodifiableMap(new HashMap(){{
public static Map<Class<?>, String> allowedMapping = Collections.unmodifiableMap(new HashMap(){{
put(Double.class, "double");
put(Float.class, "float");
put(Integer.class, "int");
Expand Down
14 changes: 12 additions & 2 deletions common/src/main/java/apoc/export/util/MetaInformation.java
Expand Up @@ -37,10 +37,12 @@
import java.util.Set;
import java.util.stream.Collectors;

import static apoc.export.util.BulkImportUtil.allowedMapping;
import static apoc.gephi.GephiFormatUtils.getCaption;
import static apoc.meta.tablesforlabels.PropertyTracker.typeMappings;
import static apoc.util.collection.Iterables.stream;
import static java.util.Arrays.asList;
import static org.apache.commons.lang3.ClassUtils.primitiveToWrapper;

/**
* @author mh
Expand Down Expand Up @@ -113,9 +115,17 @@ public static void updateKeyTypes(Map<String, Class> keyTypes, Entity pc) {

public static String typeFor(Class value, Set<String> allowed) {
if (value == void.class) return null; // Is this necessary?
final boolean isArray = value.isArray();
value = isArray ? value.getComponentType() : value;
// csv case
// consistent with https://neo4j.com/docs/operations-manual/current/tools/neo4j-admin/neo4j-admin-import/#import-tool-header-format-properties
if (allowed == null) {
return allowedMapping.getOrDefault( primitiveToWrapper(value), "string" );
}
// graphML case
String name = value.getSimpleName().toLowerCase();
boolean isAllowed = allowed.contains(name);
Types type = Types.of(value);
String name = (value.isArray() ? value.getComponentType() : value).getSimpleName().toLowerCase();
boolean isAllowed = allowed != null && allowed.contains(name);
switch (type) {
case NULL:
return null;
Expand Down
5 changes: 5 additions & 0 deletions common/src/main/resources/manyTypes.csv
@@ -0,0 +1,5 @@
_id:id,_labels:label,alpha:short,beta:byte,epsilon:int,eta:long,five:date,four:localdatetime,gamma:char,iota,one:datetime,seven,six:duration,theta:double,three:localtime,two:time,zeta:float,_start:id,_end:id,_type:label,rel:point
0,:SuperNode,,,,,2020-01-01,2021-06-08T00:00,,,2018-05-10T10:30+02:00[Europe/Berlin],2020,P5M1DT12H,,17:58:30,18:02:33Z,,,,,
1,:AnotherNode,1,"cXdlcnR5",1,1,,,A,bar,,,,10.1,,,1.1,,,,
,,,,,,,,,,,,,,,,,0,1,REL_TYPE,{"crs":"cartesian","x":56.7,"y":12.78,"z":null}

47 changes: 31 additions & 16 deletions core/src/main/java/apoc/export/csv/CsvFormat.java
Expand Up @@ -40,7 +40,9 @@
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
Expand Down Expand Up @@ -163,15 +165,22 @@ public String[] writeResultHeader(Result result, CSVWriter out) {
public void writeAll(SubGraph graph, Reporter reporter, ExportConfig config, CSVWriter out) {
Map<String, Class> nodePropTypes = collectPropTypesForNodes(graph, db, config);
Map<String, Class> relPropTypes = collectPropTypesForRelationships(graph, db, config);
List<String> nodeHeader = generateHeader(nodePropTypes, config.useTypes(), NODE_HEADER_FIXED_COLUMNS);
List<String> relHeader = generateHeader(relPropTypes, config.useTypes(), REL_HEADER_FIXED_COLUMNS);
List<String> header = new ArrayList<>(nodeHeader);
List<Map.Entry<String, String>> nodeHeader = generateHeader(nodePropTypes, config.useTypes(), NODE_HEADER_FIXED_COLUMNS);
List<Map.Entry<String, String>> relHeader = generateHeader(relPropTypes, config.useTypes(), REL_HEADER_FIXED_COLUMNS);
List<Map.Entry<String, String>> header = new ArrayList<>(nodeHeader);
header.addAll(relHeader);
out.writeNext(header.toArray(new String[header.size()]), applyQuotesToAll);
out.writeNext(header.stream().map(e -> e.getKey() + e.getValue()).toArray(String[]::new), applyQuotesToAll);
int cols = header.size();

writeNodes(graph, out, reporter, nodeHeader.subList(NODE_HEADER_FIXED_COLUMNS.length, nodeHeader.size()), cols, config.getBatchSize());
writeRels(graph, out, reporter, relHeader.subList(REL_HEADER_FIXED_COLUMNS.length, relHeader.size()), cols, nodeHeader.size(), config.getBatchSize());
writeNodes(graph, out, reporter, getNamesHeader(nodeHeader, NODE_HEADER_FIXED_COLUMNS.length), cols, config.getBatchSize());
writeRels(graph, out, reporter, getNamesHeader(relHeader, REL_HEADER_FIXED_COLUMNS.length), cols, nodeHeader.size(), config.getBatchSize());
}

private List<String> getNamesHeader(List<Map.Entry<String, String>> header, int length) {
    // Drop the fixed leading columns and keep only each remaining property's name (the entry key).
    return header.stream()
            .skip(length)
            .map(Map.Entry::getKey)
            .collect(Collectors.toList());
}

private void writeAllBulkImport(SubGraph graph, Reporter reporter, ExportConfig config, ExportFileManager writer) {
Expand Down Expand Up @@ -281,20 +290,26 @@ private void writeRow(ExportConfig config, ExportFileManager writer, Set<String>
}
}

private List<String> generateHeader(Map<String, Class> propTypes, boolean useTypes, String... starters) {
List<String> result = new ArrayList<>();
if (useTypes) {
Collections.addAll(result, starters);
} else {
result.addAll(Stream.of(starters).map(s -> s.split(":")[0]).collect(Collectors.toList()));
}
private List<Map.Entry<String, String>> generateHeader(Map<String, Class> propTypes, boolean useTypes, String... starters) {
// we create a List of Entry<PropertyName, PropertyDataType>,
// so that the headers will look like nameProp:typeProp,nameProp2:typeProp2,...
// or, with config `useTypes: false`, like nameProp,nameProp2,...
List<Map.Entry<String, String>> result = Arrays.stream(starters)
.map(item -> {
final String[] split = item.split(":");
// with the config `useTypes: true`, we add `:<typeProp>` to each column
return new AbstractMap.SimpleEntry<>(split[0], useTypes ? (":" + split[1]) : "");
})
.collect(Collectors.toList());

result.addAll(propTypes.entrySet().stream()
.map(entry -> {
String type = MetaInformation.typeFor(entry.getValue(), null);
return (type == null || type.equals("string") || !useTypes)
? entry.getKey() : entry.getKey() + ":" + type;
// with the config `useTypes: true`, if the type is not null, we add `:<typeProp>` to each column
return new AbstractMap.SimpleEntry<>(entry.getKey(),
(type == null || type.equals("string") || !useTypes) ? "" : ":" + type);
})
.sorted()
.sorted(Map.Entry.comparingByKey())
.collect(Collectors.toList()));
return result;
}
Expand Down

0 comments on commit ae1d8be

Please sign in to comment.