0001632: SQL Server bulk loader does not handle BLOB
erilong committed Mar 5, 2014
1 parent c8d8cf8 commit 2768467
Showing 1 changed file with 22 additions and 5 deletions.
@@ -4,6 +4,9 @@
 import java.sql.SQLException;
 import java.sql.Statement;
 
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.codec.binary.Hex;
+import org.jumpmind.db.model.Column;
 import org.jumpmind.db.model.Table;
 import org.jumpmind.db.platform.IDatabasePlatform;
 import org.jumpmind.db.sql.JdbcSqlTransaction;
@@ -23,6 +26,7 @@ public class MsSqlBulkDatabaseWriter extends DatabaseWriter {
     protected IStagedResource stagedInputFile;
     protected int loadedRows = 0;
     protected boolean fireTriggers;
+    protected boolean needsBinaryConversion;
 
     public MsSqlBulkDatabaseWriter(IDatabasePlatform platform,
             IStagingManager stagingManager, NativeJdbcExtractor jdbcExtractor,
@@ -36,6 +40,13 @@ public MsSqlBulkDatabaseWriter(IDatabasePlatform platform,
 
     public boolean start(Table table) {
         if (super.start(table)) {
+            needsBinaryConversion = false;
+            for (Column column : targetTable.getColumns()) {
+                if (column.isOfBinaryType()) {
+                    needsBinaryConversion = true;
+                    break;
+                }
+            }
             //TODO: Did this because start is getting called multiple times
             // for the same table in a single batch before end is being called
             if (this.stagedInputFile == null) {
@@ -67,11 +78,17 @@ public void write(CsvData data) {
         statistics.get(batch).increment(DataWriterStatisticConstants.LINENUMBER);
         statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS);
         try {
-            String formattedData = CsvUtils.escapeCsvData(
-                    data.getParsedData(CsvData.ROW_DATA), '\0', '\0',
-                    CsvWriter.ESCAPE_MODE_DOUBLED);
-            byte[] dataToLoad = formattedData.getBytes();
-            this.stagedInputFile.getOutputStream().write(dataToLoad);
+            String[] parsedData = data.getParsedData(CsvData.ROW_DATA);
+            if (needsBinaryConversion) {
+                Column[] columns = targetTable.getColumns();
+                for (int i = 0; i < columns.length; i++) {
+                    if (columns[i].isOfBinaryType()) {
+                        parsedData[i] = new String(Hex.encodeHex(Base64.decodeBase64(parsedData[i].getBytes())));
+                    }
+                }
+            }
+            String formattedData = CsvUtils.escapeCsvData(parsedData, '\0', '\0', CsvWriter.ESCAPE_MODE_DOUBLED);
+            this.stagedInputFile.getOutputStream().write(formattedData.getBytes());
             this.stagedInputFile.getOutputStream().write('\r');
             this.stagedInputFile.getOutputStream().write('\n');
             loadedRows++;
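For reference, the conversion the patch applies to binary columns can be tried in isolation: each binary value arrives Base64-encoded in the batch CSV, and the writer re-encodes it as a hex string before staging it for the SQL Server bulk load. Below is a minimal, self-contained sketch using the same commons-codec classes the changed file imports (assumed to be on the classpath); the class and method names here are illustrative and not part of the commit.

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;

public class BinaryCsvConversionSketch {

    // Decode a Base64 value (as carried in the batch CSV) and re-encode it as
    // the hex string form that gets written to the staging file.
    static String base64ToHex(String base64Value) {
        byte[] raw = Base64.decodeBase64(base64Value.getBytes());
        return new String(Hex.encodeHex(raw));
    }

    public static void main(String[] args) {
        // "hello" in Base64 is "aGVsbG8="; its hex form is "68656c6c6f".
        System.out.println(base64ToHex("aGVsbG8="));
    }
}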
