Skip to content

Commit

Permalink
Fix importer inconsistencies and bug fixes (#1602)
Browse files Browse the repository at this point in the history
* Fix the importer inconsistencies and status message

* Fix the database importer status message and add Changelog entry

* Fix the row filter style when files are added later

* Convert to a ternary statement

* Remove redundant parameter and brief refactor to fix sonar issues.
  • Loading branch information
Auriga2 committed Dec 17, 2021
1 parent 8a234e4 commit 9f9d4aa
Show file tree
Hide file tree
Showing 5 changed files with 183 additions and 116 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,13 @@
# Constellation Changes

## Changes in December 2021

- Changed the return type of `processVertices` and `processTransactions` methods
in `ImportJDBCPlugin` and `ImportDelimitedPlugin` classes to return the number
of imported rows. Added a new parameter `totalRows` in `displaySummaryAlert`
method of `ImportJDBCPlugin` class. These allow a more meaningful
summary status message after importing.

## Changes in November 2021

- Added `netbeans.exception.report.min.level=900` and
Expand Down
Expand Up @@ -180,13 +180,7 @@ public RunPane(final ImportController importController, final String displayText
filterField.setFocusTraversable(false);
filterField.setMinHeight(USE_PREF_SIZE);
filterField.setStyle(FILTER_STYLE);
filterField.textProperty().addListener((observable, oldValue, newValue) -> {
if (setFilter(newValue)) {
filterField.setStyle(FILTER_STYLE);
} else {
filterField.setStyle(FILTER_STYLE_ALERT);
}
});
filterField.textProperty().addListener((observable, oldValue, newValue) -> setFilterStyle(newValue));

filterField.setPromptText("Currently unavailable. The filter will be ready to use shortly");
FILTER_LOAD.thenRun(() -> filterField.setPromptText("Start typing to search, e.g. first_name==\"NICK\""));
Expand Down Expand Up @@ -305,6 +299,10 @@ public RunPane(final ImportController importController, final String displayText
});
}

private void setFilterStyle(final String value) {
filterField.setStyle(setFilter(value) ? FILTER_STYLE : FILTER_STYLE_ALERT);
}

/**
* Update name associated with this pane. This value is used in ImportDefinition construction to identify the
* source of the ImportDefinition - ultimately being used when performing import to support an import status dialog.
Expand Down Expand Up @@ -457,7 +455,7 @@ public void setSampleData(final String[] columnLabels, final ObservableList<Tabl
}
currentRows = newRows;
sampleDataView.setItems(currentRows);
setFilter(filter);
setFilterStyle(filter);
}

public void clearFilters() {
Expand Down Expand Up @@ -490,7 +488,7 @@ public boolean validate(final ImportTableColumn column) {
return false;
}

public boolean setFilter(final String filter) {
private boolean setFilter(final String filter) {
this.filter = filter;
if (filter.isEmpty()) {
currentRows.forEach(tableRow -> tableRow.setIncluded(true));
Expand Down Expand Up @@ -571,7 +569,7 @@ void update(final ImportDefinition impdef) {
script = "";
}
filterField.setText(script);
setFilter(script);
setFilterStyle(script);

updateColumns(impdef, sourceVertexAttributeList, AttributeType.SOURCE_VERTEX);
updateColumns(impdef, destinationVertexAttributeList, AttributeType.DESTINATION_VERTEX);
Expand Down
Expand Up @@ -211,13 +211,13 @@ protected void edit(final GraphWriteMethods graph, final PluginInteraction inter
final boolean initialiseWithSchema = parameters.getParameters().get(SCHEMA_PARAMETER_ID).getBooleanValue();
final PluginParameters parserParameters = (PluginParameters) parameters.getParameters().get(PARSER_PARAMETER_IDS_PARAMETER_ID).getObjectValue();
final boolean filesIncludeHeaders = parameters.getParameters().get(FILES_INCLUDE_HEADERS_PARAMETER_ID).getBooleanValue();
final List<Integer> newVertices = new ArrayList<>();
boolean positionalAtrributesExist = false;
final List<String> validFiles = new ArrayList<>();
final List<String> emptyFiles = new ArrayList<>();
final List<String> invalidFiles = new ArrayList<>();
final List<String> emptyRunConfigs = new ArrayList<>();
int importRows = 0;
int totalRows = 0;
int totalImportedRows = 0;
int dataSize = 0;

// Loop through import definitions looking for those that don't have either a source or destination vertex (as
Expand All @@ -232,13 +232,13 @@ protected void edit(final GraphWriteMethods graph, final PluginInteraction inter
for (final File file : files) {
interaction.setProgress(0, 0, "Reading File: " + file.getName(), true);
List<String[]> data = null;
int importedRowsPerFile = 0;

try {
data = parser.parse(new InputSource(file), parserParameters);
dataSize = filesIncludeHeaders ? data.size() - 1 : data.size();
importRows = importRows + Integer.max(0, dataSize);
totalRows = totalRows + Integer.max(0, dataSize);

LOGGER.log(Level.INFO, "Imported {0} rows of data from file {1}. {2} total rows imported", new Object[]{dataSize, file.getPath(), importRows});
if (dataSize > 0) {
if (validFiles.isEmpty()) {
validFiles.add(file.getName().concat(" (").concat(Integer.toString(dataSize)).concat(" rows)"));
Expand All @@ -264,24 +264,26 @@ protected void edit(final GraphWriteMethods graph, final PluginInteraction inter
// No source vertex definitions are set, the only option left is destination vertexes being mapped.
// Process destination vertexes if definitions are defined, otherwise there is nothing to do.
if (!definition.getDefinitions(AttributeType.DESTINATION_VERTEX).isEmpty()) {
processVertices(definition, graph, data, AttributeType.DESTINATION_VERTEX, initialiseWithSchema, interaction, file.getName(), newVertices);
importedRowsPerFile += processVertices(definition, graph, data, AttributeType.DESTINATION_VERTEX, initialiseWithSchema, interaction, file.getName());
}
} else if (definition.getDefinitions(AttributeType.DESTINATION_VERTEX).isEmpty()) {
// Source definitions exist, but no destination definitions exist. Process the source definitions.
processVertices(definition, graph, data, AttributeType.SOURCE_VERTEX, initialiseWithSchema, interaction, file.getName(), newVertices);
importedRowsPerFile += processVertices(definition, graph, data, AttributeType.SOURCE_VERTEX, initialiseWithSchema, interaction, file.getName());
} else {
// Both source and destination definitions exist, process them.
processTransactions(definition, graph, data, initialiseWithSchema, interaction, file.getName());
importedRowsPerFile += processTransactions(definition, graph, data, initialiseWithSchema, interaction, file.getName());
}

// Determine if a positional attribute has been defined, if so update the overall flag
final boolean isPositional = attributeDefintionIsPositional(definition.getDefinitions(AttributeType.SOURCE_VERTEX), definition.getDefinitions(AttributeType.DESTINATION_VERTEX));
positionalAtrributesExist = (positionalAtrributesExist || isPositional);
}
}
totalImportedRows += importedRowsPerFile;
LOGGER.log(Level.INFO, "Imported {0} rows of data from file {1} containing {2} total rows", new Object[]{importedRowsPerFile, file.getPath(), dataSize});
}

displaySummaryAlert(graph.getVertexCount() + graph.getTransactionCount(), importRows, validFiles, emptyFiles, invalidFiles, emptyRunConfigs);
displaySummaryAlert(graph.getVertexCount() + graph.getTransactionCount(), totalImportedRows, validFiles, emptyFiles, invalidFiles, emptyRunConfigs);

ConstellationLoggerHelper.importPropertyBuilder(
this,
Expand All @@ -300,7 +302,8 @@ protected void edit(final GraphWriteMethods graph, final PluginInteraction inter
graph.validateKey(GraphElementType.TRANSACTION, true);

// unfortunately need to arrange with pendants and uncollide because grid arranger works based on selection
final VertexListInclusionGraph vlGraph = new VertexListInclusionGraph(graph, AbstractInclusionGraph.Connections.NONE, newVertices);
final VertexListInclusionGraph vlGraph = new VertexListInclusionGraph(graph, AbstractInclusionGraph.Connections.NONE, new ArrayList<>());

PluginExecutor.startWith(ArrangementPluginRegistry.GRID_COMPOSITE)
.followedBy(ArrangementPluginRegistry.PENDANTS)
.followedBy(ArrangementPluginRegistry.UNCOLLIDE)
Expand All @@ -322,12 +325,14 @@ private static boolean attributeDefintionIsPositional(List<ImportAttributeDefini
return destAttributeDefinitions.stream().map(attribute -> attribute.getAttribute().getName()).anyMatch(name -> (VisualConcept.VertexAttribute.X.getName().equals(name) || VisualConcept.VertexAttribute.Y.getName().equals(name) || VisualConcept.VertexAttribute.Z.getName().equals(name)));
}

private static void processVertices(ImportDefinition definition, GraphWriteMethods graph, List<String[]> data, AttributeType attributeType, boolean initialiseWithSchema, PluginInteraction interaction, String source, final List<Integer> newVertices) throws InterruptedException {
private static int processVertices(ImportDefinition definition, GraphWriteMethods graph, List<String[]> data, AttributeType attributeType,
boolean initialiseWithSchema, PluginInteraction interaction, String source) throws InterruptedException {
final List<ImportAttributeDefinition> attributeDefinitions = definition.getDefinitions(attributeType);

addAttributes(graph, GraphElementType.VERTEX, attributeDefinitions);

int currentRow = 0;
int importedRows = 0;
final int totalRows = data.size() - definition.getFirstRow();

final RowFilter filter = definition.getRowFilter();
Expand All @@ -337,8 +342,9 @@ private static void processVertices(ImportDefinition definition, GraphWriteMetho

final String[] row = data.get(i);
if (filter == null || filter.passesFilter(i - 1, row)) {
// Count the number of processed rows to notify in the status message
++importedRows;
final int vertexId = graph.addVertex();
newVertices.add(vertexId);

for (final ImportAttributeDefinition attributeDefinition : attributeDefinitions) {
attributeDefinition.setValue(graph, vertexId, row, (i - 1));
Expand All @@ -349,9 +355,10 @@ private static void processVertices(ImportDefinition definition, GraphWriteMetho
}
}
}
return importedRows;
}

private static void processTransactions(ImportDefinition definition, GraphWriteMethods graph, List<String[]> data, boolean initialiseWithSchema, PluginInteraction interaction, String source) throws InterruptedException {
private static int processTransactions(ImportDefinition definition, GraphWriteMethods graph, List<String[]> data, boolean initialiseWithSchema, PluginInteraction interaction, String source) throws InterruptedException {
final List<ImportAttributeDefinition> sourceVertexDefinitions = definition.getDefinitions(AttributeType.SOURCE_VERTEX);
final List<ImportAttributeDefinition> destinationVertexDefinitions = definition.getDefinitions(AttributeType.DESTINATION_VERTEX);
final List<ImportAttributeDefinition> transactionDefinitions = definition.getDefinitions(AttributeType.TRANSACTION);
Expand All @@ -369,6 +376,7 @@ private static void processTransactions(ImportDefinition definition, GraphWriteM
addAttributes(graph, GraphElementType.TRANSACTION, transactionDefinitions);

int currentRow = 0;
int importedRows = 0;
final int totalRows = data.size() - definition.getFirstRow();

final RowFilter filter = definition.getRowFilter();
Expand All @@ -379,6 +387,8 @@ private static void processTransactions(ImportDefinition definition, GraphWriteM
final String[] row = data.get(i);

if (filter == null || filter.passesFilter(i - 1, row)) {
// Count the number of processed rows to notify in the status message
++importedRows;
final int sourceVertexId = graph.addVertex();
for (final ImportAttributeDefinition attributeDefinition : sourceVertexDefinitions) {
attributeDefinition.setValue(graph, sourceVertexId, row, (i - 1));
Expand Down Expand Up @@ -407,6 +417,7 @@ private static void processTransactions(ImportDefinition definition, GraphWriteM
}
}
}
return importedRows;
}

/**
Expand Down

0 comments on commit 9f9d4aa

Please sign in to comment.