Skip to content

Commit

Permalink
fixes #47
Browse files Browse the repository at this point in the history
  • Loading branch information
rmpestano committed Sep 12, 2016
1 parent b36d86c commit 80fdad8
Show file tree
Hide file tree
Showing 7 changed files with 309 additions and 119 deletions.
33 changes: 23 additions & 10 deletions core/src/main/java/com/github/dbunit/rules/DBUnitRule.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;

import static com.github.dbunit.rules.util.EntityManagerProvider.em;
import static com.github.dbunit.rules.util.EntityManagerProvider.isEntityManagerActive;
Expand Down Expand Up @@ -80,7 +83,7 @@ public void evaluate() throws Throwable {
executor.setDBUnitConfig(dbUnitConfig);
executor.createDataSet(dataSetConfig);
} catch (final Exception e) {
throw new RuntimeException("Could not create dataset due to following error " + e.getMessage(), e);
throw new RuntimeException(String.format("Could not create dataset for test '%s'.",description.getMethodName()), e);
}
boolean isTransactional = false;
try {
Expand Down Expand Up @@ -114,15 +117,7 @@ public void evaluate() throws Throwable {
}
throw e;
} finally {
ExportDataSet exportDataSet = resolveExportDataSet(description);
if(exportDataSet != null){
DataSetExportConfig exportConfig = DataSetExportConfig.from(exportDataSet);
String outputName = exportConfig.getOutputFileName();
if(outputName == null || "".equals(outputName.trim())){
outputName = description.getMethodName().toLowerCase()+"."+exportConfig.getDataSetFormat().name().toLowerCase();
}
DataSetExporterImpl.getInstance().export(executor.getConnectionHolder().getConnection(),exportConfig ,outputName);
}
exportDataSet(executor,description);
if (dataSetConfig != null && dataSetConfig.getExecuteStatementsAfter() != null && dataSetConfig.getExecuteStatementsAfter().length > 0) {
try {
executor.executeStatements(dataSetConfig.getExecuteStatementsAfter());
Expand All @@ -147,7 +142,9 @@ public void evaluate() throws Throwable {
executor.clearDatabase(dataSetConfig);
}
}
//no dataset provided, only export and evaluate expected dataset
} else {
exportDataSet(executor,description);
statement.evaluate();
performDataSetComparison(description);
}
Expand All @@ -158,6 +155,22 @@ public void evaluate() throws Throwable {
};
}

/**
 * Exports the current database state after a test when the test (or its class)
 * is annotated with {@code @ExportDataSet}. Export failures are logged as a
 * warning and never fail the test.
 *
 * @param executor    executor whose DBUnit connection is used for the export
 * @param description JUnit description of the test method being executed
 */
private void exportDataSet(DataSetExecutor executor, Description description) {
    final ExportDataSet annotation = resolveExportDataSet(description);
    if (annotation == null) {
        return; // test not annotated for export, nothing to do
    }
    final DataSetExportConfig config = DataSetExportConfig.from(annotation);
    String fileName = config.getOutputFileName();
    if (fileName == null || fileName.trim().isEmpty()) {
        // default output name: <testmethod>.<format>, e.g. "sometest.yml"
        fileName = description.getMethodName().toLowerCase() + "." + config.getDataSetFormat().name().toLowerCase();
    }
    try {
        DataSetExporterImpl.getInstance().export(executor.getDBUnitConnection(), config, fileName);
    } catch (Exception e) {
        Logger.getLogger(getClass().getName()).log(Level.WARNING, "Could not export dataset after method " + description.getMethodName(), e);
    }
}

private ExportDataSet resolveExportDataSet(Description description) {
ExportDataSet exportDataSet = description.getAnnotation(ExportDataSet.class);
if (exportDataSet == null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import com.github.dbunit.rules.configuration.DBUnitConfig;
import com.github.dbunit.rules.configuration.DataSetConfig;
import org.dbunit.DatabaseUnitException;
import org.dbunit.database.DatabaseConnection;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.IDataSet;

Expand Down Expand Up @@ -45,6 +46,7 @@ public interface DataSetExecutor{
void setDBUnitConfig(DBUnitConfig dbUnitConfig);

DBUnitConfig getDBUnitConfig();




    /**
     * @return the DBUnit {@link DatabaseConnection} used by this executor,
     *         so collaborators (e.g. the dataset exporter) can reuse it.
     */
    DatabaseConnection getDBUnitConnection();
}

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -573,4 +573,7 @@ public DBUnitConfig getDBUnitConfig() {
}


/**
 * Exposes the underlying DBUnit {@link DatabaseConnection} so callers
 * (e.g. the dataset exporter) can reuse it instead of re-wrapping the raw
 * JDBC connection.
 *
 * @return the DBUnit connection held by this executor
 */
public DatabaseConnection getDBUnitConnection() {
    return databaseConnection;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
package com.github.dbunit.rules.dataset.writer;

import org.dbunit.dataset.Column;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.ITableMetaData;
import org.dbunit.dataset.stream.DataSetProducerAdapter;
import org.dbunit.dataset.stream.IDataSetConsumer;

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
* Created by pestano on 11/09/16.
*/
/**
 * DBUnit {@link IDataSetConsumer} that serializes an {@link IDataSet} to YAML,
 * writing one top-level key per table and one list entry per row.
 *
 * <p>Write errors inside the consumer callbacks are logged as warnings rather
 * than propagated, matching DBUnit's streaming-producer contract.</p>
 */
public class YMLWriter implements IDataSetConsumer {

    private static final String NEW_LINE = System.getProperty("line.separator");
    // NOTE(review): indentation unit for YAML nesting; conventionally two spaces —
    // confirm this literal was not collapsed when the source was extracted.
    private static final String DOUBLE_SPACES = " ";
    private static final Logger LOG = Logger.getLogger(YMLWriter.class.getName());


    private final OutputStreamWriter out;
    // metadata of the table currently being written; set by startTable()
    private ITableMetaData metaData;

    /**
     * @param outputStream destination stream; content is encoded as UTF-8
     * @throws IOException if the writer cannot be created
     */
    public YMLWriter(OutputStream outputStream) throws IOException {
        out = new OutputStreamWriter(outputStream, "UTF-8");
    }

    /** No-op: the YAML document needs no preamble. */
    @Override
    public void startDataSet() throws DataSetException {

    }

    /** Flushes buffered output; failure is logged, not thrown. */
    @Override
    public void endDataSet() throws DataSetException {
        try {
            out.flush();
        } catch (IOException e) {
            LOG.log(Level.WARNING, "Could not end dataset.", e);
        }
    }

    /** Starts a table section: remembers its metadata and writes "TABLE:". */
    @Override
    public void startTable(ITableMetaData metaData) throws DataSetException {
        this.metaData = metaData;
        try {
            out.write(metaData.getTableName() + ":" + NEW_LINE);
        } catch (IOException e) {
            LOG.log(Level.WARNING, "Could not start table.", e);
        }
    }

    /** Ends a table section with a blank line separator. */
    @Override
    public void endTable() throws DataSetException {
        try {
            out.write(NEW_LINE);
        } catch (IOException e) {
            // fixed message: was "Could end table."
            LOG.log(Level.WARNING, "Could not end table.", e);
        }
    }

    /**
     * Writes one row as a YAML list entry; non-numeric values are quoted.
     * Null values are emitted as an empty quoted string / empty scalar —
     * NOTE(review): this loses the null/empty distinction; confirm readers accept it.
     */
    @Override
    public void row(Object[] values) throws DataSetException {
        try {
            for (int i = 0; i < values.length; i++) {
                if (i == 0) {
                    out.write(DOUBLE_SPACES + "- "); // first column opens the list entry
                } else {
                    out.write(DOUBLE_SPACES + DOUBLE_SPACES);
                }

                Column currentColumn = metaData.getColumns()[i];
                out.write(currentColumn.getColumnName() + ": ");
                boolean isNumber = currentColumn.getDataType().isNumber();
                if (!isNumber) {
                    out.write("\"");
                }
                if (values[i] != null) {
                    out.write(values[i].toString());
                }
                if (!isNumber) {
                    out.write("\"");
                }
                out.write(NEW_LINE);
            }
        } catch (DataSetException | IOException e) { // narrowed from catch(Exception)
            LOG.log(Level.WARNING, "Could not write row.", e);
        }
    }

    /**
     * Streams the given dataset through this consumer.
     *
     * @param dataSet dataset to serialize
     * @throws DataSetException if DBUnit fails while producing the dataset
     */
    public synchronized void write(IDataSet dataSet) throws DataSetException
    {
        DataSetProducerAdapter provider = new DataSetProducerAdapter(dataSet);
        provider.setConsumer(this);
        provider.produce();
    }
}
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
package com.github.dbunit.rules.exporter;

import com.github.dbunit.rules.api.expoter.DataSetExportConfig;
import com.github.dbunit.rules.api.expoter.DataSetExporter;
import com.github.dbunit.rules.dataset.writer.YMLWriter;
import org.dbunit.DatabaseUnitException;
import org.dbunit.database.*;
import org.dbunit.database.DatabaseConfig;
import org.dbunit.database.DatabaseConnection;
import org.dbunit.database.ForwardOnlyResultSetTableFactory;
import org.dbunit.database.QueryDataSet;
import org.dbunit.database.search.TablesDependencyHelper;
import org.dbunit.dataset.FilteredDataSet;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.filter.ITableFilter;
import org.dbunit.dataset.xml.FlatXmlDataSet;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.HashSet;
Expand All @@ -30,7 +30,7 @@
* <p/>
* based on: http://archive.oreilly.com/pub/post/dbunit_made_easy.html
*/
public class DataSetExporterImpl implements DataSetExporter {
public class DataSetExporterImpl {

/**
* A regular expression that is used to get the table name
Expand All @@ -46,21 +46,20 @@ public class DataSetExporterImpl implements DataSetExporter {
private static Logger log = Logger.getLogger(DataSetExporterImpl.class.getName());


private static DataSetExporter instance;
private static DataSetExporterImpl instance;

private DataSetExporterImpl(){}

public static DataSetExporter getInstance(){
/**
 * Returns the singleton exporter, creating it lazily on first use.
 * Synchronized so concurrent test execution cannot observe a partially
 * constructed instance or create two instances (the original lazy check
 * was not thread-safe).
 *
 * @return the shared {@code DataSetExporterImpl} instance
 */
public static synchronized DataSetExporterImpl getInstance(){
    if(instance == null){
        instance = new DataSetExporterImpl();
    }
    return instance;
}

@Override
public OutputStream export(Connection connection, DataSetExportConfig dataSetExportConfig, String outputFile) throws SQLException, DatabaseUnitException {
public OutputStream export(DatabaseConnection databaseConnection, DataSetExportConfig dataSetExportConfig, String outputFile) throws SQLException, DatabaseUnitException {

if (connection == null || connection.isClosed()) {
if (databaseConnection == null || databaseConnection.getConnection() == null || databaseConnection.getConnection().isClosed()) {
throw new RuntimeException("Provide a valid connection to export datasets");
}

Expand All @@ -78,45 +77,31 @@ public OutputStream export(Connection connection, DataSetExportConfig dataSetExp

boolean hasIncludes = dataSetExportConfig.getIncludeTables() != null && dataSetExportConfig.getIncludeTables().length > 0;

IDatabaseConnection dbunitConnection = new DatabaseConnection(connection);

DatabaseConfig config = dbunitConnection.getConfig();
DatabaseConfig config = databaseConnection.getConfig();
config.setProperty(DatabaseConfig.PROPERTY_RESULTSET_TABLE_FACTORY, new ForwardOnlyResultSetTableFactory());

Set<String> targetTables = new HashSet<>();

if (hasIncludes) {
targetTables.addAll(Arrays.asList(dataSetExportConfig.getIncludeTables()));
if (dataSetExportConfig.isDependentTables()) {
String[] dependentTables = TablesDependencyHelper.getAllDependentTables(dbunitConnection, dataSetExportConfig.getIncludeTables());
String[] dependentTables = TablesDependencyHelper.getAllDependentTables(databaseConnection, dataSetExportConfig.getIncludeTables());
if (dependentTables != null && dependentTables.length > 0) {
targetTables.addAll(Arrays.asList(dependentTables));
}
}
}


QueryDataSet queryDataSet = null;
if (dataSetExportConfig.getQueryList() != null && dataSetExportConfig.getQueryList().length > 0) {
queryDataSet = new QueryDataSet(dbunitConnection);
addQueries(queryDataSet, dataSetExportConfig.getQueryList(), targetTables);
IDataSet dataSet = new QueryDataSet(databaseConnection);
if ((targetTables != null && !targetTables.isEmpty()) || (dataSetExportConfig.getQueryList() != null && dataSetExportConfig.getQueryList().length > 0)) {
addQueries((QueryDataSet)dataSet, dataSetExportConfig.getQueryList(), targetTables);
} else{
dataSet = databaseConnection.createDataSet();
}


IDataSet dataset = null;
ITableFilter filter = null;
//sequenceFiltering
if (!targetTables.isEmpty()) {
filter = new DatabaseSequenceFilter(dbunitConnection, targetTables.toArray(new String[targetTables.size()]));
} else {
//if no tables are included then use seq filtering on all tables
filter = new DatabaseSequenceFilter(dbunitConnection);
}
if (queryDataSet != null) {
dataset = queryDataSet;
} else {
dataset = new FilteredDataSet(filter, dbunitConnection.createDataSet());
}
FileOutputStream fos = null;
try {
if(outputFile.contains(System.getProperty("file.separator"))){
Expand All @@ -127,9 +112,19 @@ public OutputStream export(Connection connection, DataSetExportConfig dataSetExp
fos = new FileOutputStream(outputFile);
switch (dataSetExportConfig.getDataSetFormat()) {
case XML: {
FlatXmlDataSet.write(dataset, fos);
FlatXmlDataSet.write(dataSet, fos);
log.info("DataSet exported successfully at "+ Paths.get(outputFile).toAbsolutePath().toString());
break;
}
case YML: {
new YMLWriter(fos).write(dataSet);
log.info("DataSet exported successfully at "+ Paths.get(outputFile).toAbsolutePath().toString());
break;
}
default: {
throw new RuntimeException("Format not supported.");
}

}
} catch (Exception e) {
log.log(Level.SEVERE, "Could not export dataset.", e);
Expand Down
Loading

0 comments on commit 80fdad8

Please sign in to comment.