From 7f0b91c6d4c42144ac627c52364366781d6ac622 Mon Sep 17 00:00:00 2001 From: maxwellpettit Date: Fri, 19 May 2017 08:29:56 -0400 Subject: [PATCH] 0003112: Make Outgoing Batch and Incoming Batch Similar --- .../symmetric/io/MongoDatabaseWriter.java | 12 +- .../symmetric/io/MsSqlBulkDatabaseWriter.java | 488 +-- .../symmetric/io/MySqlBulkDatabaseWriter.java | 8 +- .../io/OracleBulkDatabaseWriter.java | 4 +- .../io/PostgresBulkDatabaseWriter.java | 4 +- .../io/RedshiftBulkDatabaseWriter.java | 8 +- .../symmetric/model/AbstractBatch.java | 477 ++- .../jumpmind/symmetric/model/BatchAck.java | 340 +- .../symmetric/model/IncomingBatch.java | 141 +- .../symmetric/model/OutgoingBatch.java | 262 +- .../model/OutgoingBatchWithPayload.java | 178 +- .../symmetric/model/RemoteNodeStatus.java | 4 +- .../route/DefaultBatchAlgorithm.java | 2 +- .../route/NonTransactionalBatchAlgorithm.java | 2 +- .../symmetric/service/IDataService.java | 338 +- .../service/IIncomingBatchService.java | 126 +- .../service/impl/AcknowledgeService.java | 4 +- .../service/impl/DataExtractorService.java | 24 +- .../service/impl/DataLoaderService.java | 8 +- .../symmetric/service/impl/DataService.java | 4 +- .../service/impl/FileSyncService.java | 4 +- .../service/impl/IncomingBatchService.java | 47 +- .../impl/IncomingBatchServiceSqlMap.java | 28 +- .../service/impl/MultiBatchStagingWriter.java | 14 +- .../service/impl/OutgoingBatchService.java | 42 +- .../impl/OutgoingBatchServiceSqlMap.java | 36 +- .../symmetric/service/impl/RouterService.java | 4 +- .../transport/AbstractTransportManager.java | 326 +- .../src/main/resources/symmetric-schema.xml | 119 +- .../symmetric/model/OutgoingBatchesTest.java | 2 +- .../impl/AbstractDataLoaderServiceTest.java | 16 +- .../impl/AbstractRouterServiceTest.java | 4 +- .../data/writer/AbstractDatabaseWriter.java | 2 +- .../writer/DataWriterStatisticConstants.java | 2 +- .../io/data/writer/DefaultDatabaseWriter.java | 28 +- .../symmetric/web/PullUriHandler.java | 212 +- .../symmetric/web/rest/RestService.java | 3558 ++++++++--------- .../test/AbstractIntegrationTest.java | 2 +- .../jumpmind/symmetric/test/AbstractTest.java | 2 +- ...MultiNodeGroupSharedTablesRoutingTest.java | 8 +- 40 files changed, 3512 insertions(+), 3378 deletions(-) diff --git a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MongoDatabaseWriter.java b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MongoDatabaseWriter.java index 86db62f063..1e49d80c09 100644 --- a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MongoDatabaseWriter.java +++ b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MongoDatabaseWriter.java @@ -94,7 +94,7 @@ protected void logFailureDetails(Throwable e, CsvData data, boolean logLastDmlDe } protected LoadStatus upsert(CsvData data) { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); try { DB db = clientManager.getDB(objectMapper.mapToDatabase(this.targetTable)); DBCollection collection = db.getCollection(objectMapper @@ -117,13 +117,13 @@ protected LoadStatus upsert(CsvData data) { throw new SymmetricException("Failed to write data: " + object.toString()); } } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } @Override protected LoadStatus delete(CsvData data, boolean useConflictDetection) { - 
statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); try { DB db = clientManager.getDB(objectMapper.mapToDatabase(this.targetTable)); DBCollection collection = db.getCollection(objectMapper @@ -143,7 +143,7 @@ protected LoadStatus delete(CsvData data, boolean useConflictDetection) { } return LoadStatus.SUCCESS; } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } @@ -155,7 +155,7 @@ protected boolean create(CsvData data) { @Override protected boolean sql(CsvData data) { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); try { DB db = clientManager.getDB(objectMapper.mapToDatabase(this.targetTable)); String command = data.getParsedData(CsvData.ROW_DATA)[0]; @@ -163,7 +163,7 @@ protected boolean sql(CsvData data) { CommandResult results = db.command(command); log.info("The results of the command were: {}", results); } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } return true; } diff --git a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MsSqlBulkDatabaseWriter.java b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MsSqlBulkDatabaseWriter.java index b9b578d451..d7bf5f2ea6 100644 --- a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MsSqlBulkDatabaseWriter.java +++ b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MsSqlBulkDatabaseWriter.java @@ -1,245 +1,245 @@ -/** - * Licensed to JumpMind Inc under one or more contributor - * license agreements. See the NOTICE file distributed - * with this work for additional information regarding - * copyright ownership. JumpMind Inc licenses this file - * to you under the GNU General Public License, version 3.0 (GPLv3) - * (the "License"); you may not use this file except in compliance - * with the License. - * - * You should have received a copy of the GNU General Public License, - * version 3.0 (GPLv3) along with this library; if not, see - * . - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.jumpmind.symmetric.io; - -import java.io.OutputStream; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Map; - -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.codec.binary.Hex; -import org.apache.commons.lang.StringEscapeUtils; -import org.apache.commons.lang.StringUtils; -import org.jumpmind.db.model.Column; -import org.jumpmind.db.model.Table; -import org.jumpmind.db.platform.DatabaseInfo; -import org.jumpmind.db.platform.IDatabasePlatform; -import org.jumpmind.db.sql.JdbcSqlTransaction; -import org.jumpmind.db.util.BinaryEncoding; -import org.jumpmind.symmetric.io.data.CsvData; -import org.jumpmind.symmetric.io.data.DataEventType; -import org.jumpmind.symmetric.io.data.writer.DataWriterStatisticConstants; -import org.jumpmind.symmetric.io.data.writer.DefaultDatabaseWriter; -import org.jumpmind.symmetric.io.stage.IStagedResource; -import org.jumpmind.symmetric.io.stage.IStagingManager; -import org.springframework.jdbc.support.nativejdbc.NativeJdbcExtractor; - -public class MsSqlBulkDatabaseWriter extends DefaultDatabaseWriter { - - protected NativeJdbcExtractor jdbcExtractor; - protected int maxRowsBeforeFlush; - protected IStagingManager stagingManager; - protected IStagedResource stagedInputFile; - protected String rowTerminator = "\r\n"; - protected String fieldTerminator = "||"; - protected int loadedRows = 0; - protected boolean fireTriggers; - protected String uncPath; - protected boolean needsBinaryConversion; - protected boolean needsColumnsReordered; - protected Table table = null; - protected Table databaseTable = null; - - public MsSqlBulkDatabaseWriter(IDatabasePlatform platform, - IStagingManager stagingManager, NativeJdbcExtractor jdbcExtractor, - int maxRowsBeforeFlush, boolean fireTriggers, String uncPath, String fieldTerminator, String rowTerminator) { - super(platform); - this.jdbcExtractor = jdbcExtractor; - this.maxRowsBeforeFlush = maxRowsBeforeFlush; - this.stagingManager = stagingManager; - this.fireTriggers = fireTriggers; - if (fieldTerminator != null && fieldTerminator.length() > 0) { - this.fieldTerminator = fieldTerminator; - } - if (rowTerminator != null && rowTerminator.length() > 0) { - this.rowTerminator = rowTerminator; - } - this.uncPath = uncPath; - } - - public boolean start(Table table) { - this.table = table; - if (super.start(table)) { - needsBinaryConversion = false; - if (! batch.getBinaryEncoding().equals(BinaryEncoding.HEX)) { - for (Column column : targetTable.getColumns()) { - if (column.isOfBinaryType()) { - needsBinaryConversion = true; - break; - } - } - } - databaseTable = platform.getTableFromCache(sourceTable.getCatalog(), sourceTable.getSchema(), - sourceTable.getName(), false); - String[] csvNames = targetTable.getColumnNames(); - String[] columnNames = databaseTable.getColumnNames(); - needsColumnsReordered = false; - for (int i = 0; i < csvNames.length; i++) { - if (! 
csvNames[i].equals(columnNames[i])) { - needsColumnsReordered = true; - break; - } - } - //TODO: Did this because start is getting called multiple times - // for the same table in a single batch before end is being called - if (this.stagedInputFile == null) { - createStagingFile(); - } - return true; - } else { - return false; - } - } - - @Override - public void end(Table table) { - try { - flush(); - this.stagedInputFile.close(); - this.stagedInputFile.delete(); - } finally { - super.end(table); - } - } - - public void write(CsvData data) { - DataEventType dataEventType = data.getDataEventType(); - - switch (dataEventType) { - case INSERT: - statistics.get(batch).increment(DataWriterStatisticConstants.STATEMENTCOUNT); - statistics.get(batch).increment(DataWriterStatisticConstants.LINENUMBER); - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); - try { - String[] parsedData = data.getParsedData(CsvData.ROW_DATA); - if (needsBinaryConversion) { - Column[] columns = targetTable.getColumns(); - for (int i = 0; i < columns.length; i++) { - if (columns[i].isOfBinaryType()) { - if (batch.getBinaryEncoding().equals(BinaryEncoding.BASE64) && parsedData[i] != null) { - parsedData[i] = new String(Hex.encodeHex(Base64.decodeBase64(parsedData[i].getBytes()))); - } - } - } - } - OutputStream out = this.stagedInputFile.getOutputStream(); - if (needsColumnsReordered) { - Map mapData = data.toColumnNameValuePairs(targetTable.getColumnNames(), CsvData.ROW_DATA); - String[] columnNames = databaseTable.getColumnNames(); - for (int i = 0; i < columnNames.length; i++) { - String columnData = mapData.get(columnNames[i]); - if (columnData != null) { - out.write(columnData.getBytes()); - } - if (i + 1 < columnNames.length) { - out.write(fieldTerminator.getBytes()); - } - } - } else { - for (int i = 0; i < parsedData.length; i++) { - if (parsedData[i] != null) { - out.write(parsedData[i].getBytes()); - } - if (i + 1 < parsedData.length) { - out.write(fieldTerminator.getBytes()); - } - } - } - out.write(rowTerminator.getBytes()); - loadedRows++; - } catch (Exception ex) { - throw getPlatform().getSqlTemplate().translate(ex); - } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); - } - break; - case UPDATE: - case DELETE: - default: - flush(); - super.write(data); - break; - } - - if (loadedRows >= maxRowsBeforeFlush) { - flush(); - } - } - - protected void flush() { - if (loadedRows > 0) { - this.stagedInputFile.close(); - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); - String filename; - if (StringUtils.isEmpty(uncPath)) { - filename = stagedInputFile.getFile().getAbsolutePath(); - } else { - filename = uncPath + "\\" + stagedInputFile.getFile().getName(); - } - try { - DatabaseInfo dbInfo = platform.getDatabaseInfo(); - String quote = dbInfo.getDelimiterToken(); - String catalogSeparator = dbInfo.getCatalogSeparator(); - String schemaSeparator = dbInfo.getSchemaSeparator(); - JdbcSqlTransaction jdbcTransaction = (JdbcSqlTransaction) transaction; - Connection c = jdbcTransaction.getConnection(); - String rowTerminatorString = ""; - /* - * There seems to be a bug with the SQL server bulk insert when - * you have one row with binary data at the end using \n as the - * row terminator. It works when you leave the row terminator - * out of the bulk insert statement. 
- */ - if (!(rowTerminator.equals("\n") || rowTerminator.equals("\r\n"))) { - rowTerminatorString = ", ROWTERMINATOR='" + StringEscapeUtils.escapeJava(rowTerminator) + "'"; - } - String sql = String.format("BULK INSERT " + - this.getTargetTable().getQualifiedTableName(quote, catalogSeparator, schemaSeparator) + - " FROM '" + filename) + "'" + - " WITH (DATAFILETYPE='widechar', FIELDTERMINATOR='"+StringEscapeUtils.escapeJava(fieldTerminator)+"', KEEPIDENTITY" + - (fireTriggers ? ", FIRE_TRIGGERS" : "") + rowTerminatorString +");"; - Statement stmt = c.createStatement(); - - //TODO: clean this up, deal with errors, etc.? - stmt.execute(sql); - stmt.close(); - - } catch (SQLException ex) { - throw platform.getSqlTemplate().translate(ex); - } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); - } - this.stagedInputFile.delete(); - createStagingFile(); - loadedRows = 0; - } - } - - protected void createStagingFile() { - //TODO: We should use constants for dir structure path, - // but we don't want to depend on symmetric core. - this.stagedInputFile = stagingManager.create("bulkloaddir", - table.getName() + this.getBatch().getBatchId() + ".csv"); - } - +/** + * Licensed to JumpMind Inc under one or more contributor + * license agreements. See the NOTICE file distributed + * with this work for additional information regarding + * copyright ownership. JumpMind Inc licenses this file + * to you under the GNU General Public License, version 3.0 (GPLv3) + * (the "License"); you may not use this file except in compliance + * with the License. + * + * You should have received a copy of the GNU General Public License, + * version 3.0 (GPLv3) along with this library; if not, see + * . + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.jumpmind.symmetric.io; + +import java.io.OutputStream; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Map; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.codec.binary.Hex; +import org.apache.commons.lang.StringEscapeUtils; +import org.apache.commons.lang.StringUtils; +import org.jumpmind.db.model.Column; +import org.jumpmind.db.model.Table; +import org.jumpmind.db.platform.DatabaseInfo; +import org.jumpmind.db.platform.IDatabasePlatform; +import org.jumpmind.db.sql.JdbcSqlTransaction; +import org.jumpmind.db.util.BinaryEncoding; +import org.jumpmind.symmetric.io.data.CsvData; +import org.jumpmind.symmetric.io.data.DataEventType; +import org.jumpmind.symmetric.io.data.writer.DataWriterStatisticConstants; +import org.jumpmind.symmetric.io.data.writer.DefaultDatabaseWriter; +import org.jumpmind.symmetric.io.stage.IStagedResource; +import org.jumpmind.symmetric.io.stage.IStagingManager; +import org.springframework.jdbc.support.nativejdbc.NativeJdbcExtractor; + +public class MsSqlBulkDatabaseWriter extends DefaultDatabaseWriter { + + protected NativeJdbcExtractor jdbcExtractor; + protected int maxRowsBeforeFlush; + protected IStagingManager stagingManager; + protected IStagedResource stagedInputFile; + protected String rowTerminator = "\r\n"; + protected String fieldTerminator = "||"; + protected int loadedRows = 0; + protected boolean fireTriggers; + protected String uncPath; + protected boolean needsBinaryConversion; + protected boolean needsColumnsReordered; + protected Table table = null; + protected Table databaseTable = null; + + public MsSqlBulkDatabaseWriter(IDatabasePlatform platform, + IStagingManager stagingManager, NativeJdbcExtractor jdbcExtractor, + int maxRowsBeforeFlush, boolean fireTriggers, String uncPath, String fieldTerminator, String rowTerminator) { + super(platform); + this.jdbcExtractor = jdbcExtractor; + this.maxRowsBeforeFlush = maxRowsBeforeFlush; + this.stagingManager = stagingManager; + this.fireTriggers = fireTriggers; + if (fieldTerminator != null && fieldTerminator.length() > 0) { + this.fieldTerminator = fieldTerminator; + } + if (rowTerminator != null && rowTerminator.length() > 0) { + this.rowTerminator = rowTerminator; + } + this.uncPath = uncPath; + } + + public boolean start(Table table) { + this.table = table; + if (super.start(table)) { + needsBinaryConversion = false; + if (! batch.getBinaryEncoding().equals(BinaryEncoding.HEX)) { + for (Column column : targetTable.getColumns()) { + if (column.isOfBinaryType()) { + needsBinaryConversion = true; + break; + } + } + } + databaseTable = platform.getTableFromCache(sourceTable.getCatalog(), sourceTable.getSchema(), + sourceTable.getName(), false); + String[] csvNames = targetTable.getColumnNames(); + String[] columnNames = databaseTable.getColumnNames(); + needsColumnsReordered = false; + for (int i = 0; i < csvNames.length; i++) { + if (! 
csvNames[i].equals(columnNames[i])) { + needsColumnsReordered = true; + break; + } + } + //TODO: Did this because start is getting called multiple times + // for the same table in a single batch before end is being called + if (this.stagedInputFile == null) { + createStagingFile(); + } + return true; + } else { + return false; + } + } + + @Override + public void end(Table table) { + try { + flush(); + this.stagedInputFile.close(); + this.stagedInputFile.delete(); + } finally { + super.end(table); + } + } + + public void write(CsvData data) { + DataEventType dataEventType = data.getDataEventType(); + + switch (dataEventType) { + case INSERT: + statistics.get(batch).increment(DataWriterStatisticConstants.STATEMENTCOUNT); + statistics.get(batch).increment(DataWriterStatisticConstants.LINENUMBER); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); + try { + String[] parsedData = data.getParsedData(CsvData.ROW_DATA); + if (needsBinaryConversion) { + Column[] columns = targetTable.getColumns(); + for (int i = 0; i < columns.length; i++) { + if (columns[i].isOfBinaryType()) { + if (batch.getBinaryEncoding().equals(BinaryEncoding.BASE64) && parsedData[i] != null) { + parsedData[i] = new String(Hex.encodeHex(Base64.decodeBase64(parsedData[i].getBytes()))); + } + } + } + } + OutputStream out = this.stagedInputFile.getOutputStream(); + if (needsColumnsReordered) { + Map mapData = data.toColumnNameValuePairs(targetTable.getColumnNames(), CsvData.ROW_DATA); + String[] columnNames = databaseTable.getColumnNames(); + for (int i = 0; i < columnNames.length; i++) { + String columnData = mapData.get(columnNames[i]); + if (columnData != null) { + out.write(columnData.getBytes()); + } + if (i + 1 < columnNames.length) { + out.write(fieldTerminator.getBytes()); + } + } + } else { + for (int i = 0; i < parsedData.length; i++) { + if (parsedData[i] != null) { + out.write(parsedData[i].getBytes()); + } + if (i + 1 < parsedData.length) { + out.write(fieldTerminator.getBytes()); + } + } + } + out.write(rowTerminator.getBytes()); + loadedRows++; + } catch (Exception ex) { + throw getPlatform().getSqlTemplate().translate(ex); + } finally { + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); + } + break; + case UPDATE: + case DELETE: + default: + flush(); + super.write(data); + break; + } + + if (loadedRows >= maxRowsBeforeFlush) { + flush(); + } + } + + protected void flush() { + if (loadedRows > 0) { + this.stagedInputFile.close(); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); + String filename; + if (StringUtils.isEmpty(uncPath)) { + filename = stagedInputFile.getFile().getAbsolutePath(); + } else { + filename = uncPath + "\\" + stagedInputFile.getFile().getName(); + } + try { + DatabaseInfo dbInfo = platform.getDatabaseInfo(); + String quote = dbInfo.getDelimiterToken(); + String catalogSeparator = dbInfo.getCatalogSeparator(); + String schemaSeparator = dbInfo.getSchemaSeparator(); + JdbcSqlTransaction jdbcTransaction = (JdbcSqlTransaction) transaction; + Connection c = jdbcTransaction.getConnection(); + String rowTerminatorString = ""; + /* + * There seems to be a bug with the SQL server bulk insert when + * you have one row with binary data at the end using \n as the + * row terminator. It works when you leave the row terminator + * out of the bulk insert statement. 
+ */ + if (!(rowTerminator.equals("\n") || rowTerminator.equals("\r\n"))) { + rowTerminatorString = ", ROWTERMINATOR='" + StringEscapeUtils.escapeJava(rowTerminator) + "'"; + } + String sql = String.format("BULK INSERT " + + this.getTargetTable().getQualifiedTableName(quote, catalogSeparator, schemaSeparator) + + " FROM '" + filename) + "'" + + " WITH (DATAFILETYPE='widechar', FIELDTERMINATOR='"+StringEscapeUtils.escapeJava(fieldTerminator)+"', KEEPIDENTITY" + + (fireTriggers ? ", FIRE_TRIGGERS" : "") + rowTerminatorString +");"; + Statement stmt = c.createStatement(); + + //TODO: clean this up, deal with errors, etc.? + stmt.execute(sql); + stmt.close(); + + } catch (SQLException ex) { + throw platform.getSqlTemplate().translate(ex); + } finally { + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); + } + this.stagedInputFile.delete(); + createStagingFile(); + loadedRows = 0; + } + } + + protected void createStagingFile() { + //TODO: We should use constants for dir structure path, + // but we don't want to depend on symmetric core. + this.stagedInputFile = stagingManager.create("bulkloaddir", + table.getName() + this.getBatch().getBatchId() + ".csv"); + } + } \ No newline at end of file diff --git a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MySqlBulkDatabaseWriter.java b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MySqlBulkDatabaseWriter.java index 6658c4ee6d..793e258392 100644 --- a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MySqlBulkDatabaseWriter.java +++ b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/MySqlBulkDatabaseWriter.java @@ -116,7 +116,7 @@ public void write(CsvData data) { case INSERT: statistics.get(batch).increment(DataWriterStatisticConstants.STATEMENTCOUNT); statistics.get(batch).increment(DataWriterStatisticConstants.LINENUMBER); - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); try { String[] parsedData = data.getParsedData(CsvData.ROW_DATA); byte[] byteData = null; @@ -160,7 +160,7 @@ public void write(CsvData data) { } catch (Exception ex) { throw getPlatform().getSqlTemplate().translate(ex); } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } break; case UPDATE: @@ -179,7 +179,7 @@ public void write(CsvData data) { protected void flush() { if (loadedRows > 0) { this.stagedInputFile.close(); - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); try { DatabaseInfo dbInfo = platform.getDatabaseInfo(); String quote = dbInfo.getDelimiterToken(); @@ -203,7 +203,7 @@ protected void flush() { } catch (SQLException ex) { throw platform.getSqlTemplate().translate(ex); } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } this.stagedInputFile.delete(); createStagingFile(); diff --git a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/OracleBulkDatabaseWriter.java b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/OracleBulkDatabaseWriter.java index acbbd8b64b..596b28a20a 100644 --- a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/OracleBulkDatabaseWriter.java +++ 
b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/OracleBulkDatabaseWriter.java @@ -256,7 +256,7 @@ public Timestamp toUTC(Timestamp timestamp, TimeZone timezone) { } protected void flush() { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); try { if (rowArrays.size() > 0) { JdbcSqlTransaction jdbcTransaction = (JdbcSqlTransaction) transaction; @@ -331,7 +331,7 @@ protected void flush() { } finally { lastEventType = null; rowArrays.clear(); - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } diff --git a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/PostgresBulkDatabaseWriter.java b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/PostgresBulkDatabaseWriter.java index 7f7efe8270..98f30b51fe 100644 --- a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/PostgresBulkDatabaseWriter.java +++ b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/PostgresBulkDatabaseWriter.java @@ -70,7 +70,7 @@ public PostgresBulkDatabaseWriter(IDatabasePlatform platform, DatabaseWriterSett public void write(CsvData data) { statistics.get(batch).increment(DataWriterStatisticConstants.STATEMENTCOUNT); statistics.get(batch).increment(DataWriterStatisticConstants.LINENUMBER); - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); DataEventType dataEventType = data.getDataEventType(); @@ -123,7 +123,7 @@ public void write(CsvData data) { loadedRows = 0; } } - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } protected void flush() { diff --git a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/RedshiftBulkDatabaseWriter.java b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/RedshiftBulkDatabaseWriter.java index bf372c036c..5ace09b865 100644 --- a/symmetric-client/src/main/java/org/jumpmind/symmetric/io/RedshiftBulkDatabaseWriter.java +++ b/symmetric-client/src/main/java/org/jumpmind/symmetric/io/RedshiftBulkDatabaseWriter.java @@ -124,7 +124,7 @@ public void write(CsvData data) { case INSERT: statistics.get(batch).increment(DataWriterStatisticConstants.STATEMENTCOUNT); statistics.get(batch).increment(DataWriterStatisticConstants.LINENUMBER); - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); try { String[] parsedData = data.getParsedData(CsvData.ROW_DATA); String formattedData = CsvUtils.escapeCsvData(parsedData, '\n', '"', CsvWriter.ESCAPE_MODE_DOUBLED, "\\N"); @@ -134,7 +134,7 @@ public void write(CsvData data) { } catch (Exception ex) { throw getPlatform().getSqlTemplate().translate(ex); } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } break; case UPDATE: @@ -160,7 +160,7 @@ public void write(CsvData data) { protected void flush() { if (loadedRows > 0) { stagedInputFile.close(); - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); AmazonS3 s3client = new AmazonS3Client(new BasicAWSCredentials(accessKey, 
secretKey)); if (isNotBlank(s3Endpoint)) { s3client.setEndpoint(s3Endpoint); @@ -192,7 +192,7 @@ protected void flush() { } catch (SQLException ex) { throw platform.getSqlTemplate().translate(ex); } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } stagedInputFile.delete(); diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/AbstractBatch.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/AbstractBatch.java index ee66fdcb54..a40c86071b 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/AbstractBatch.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/AbstractBatch.java @@ -28,31 +28,54 @@ import org.apache.commons.lang.StringUtils; import org.jumpmind.symmetric.io.data.Batch; +import org.jumpmind.symmetric.io.data.DataEventType; public class AbstractBatch implements Serializable { - + private static final long serialVersionUID = 1L; + public enum Status { + OK("Ok"), ER("Error"), RQ("Request"), NE("New"), QY("Querying"), SE("Sending"), LD("Loading"), RT("Routing"), IG("Ignored"), RS( + "Resend"), XX("Unknown"); + + private String description; + + Status(String description) { + this.description = description; + } + + @Override + public String toString() { + return description; + } + } + private long batchId = -1; private String nodeId; private String channelId; - - private boolean errorFlag; - + + private boolean errorFlag; + private long routerMillis; private long networkMillis; private long filterMillis; - private long loadMillis; + private long loadMillis; + + private long extractMillis; - private long byteCount; + private long transformExtractMillis; + private long transformLoadMillis; + + private long byteCount; + private long ignoreCount; - + private String sqlState; private int sqlCode; @@ -64,13 +87,105 @@ public class AbstractBatch implements Serializable { private Date lastUpdatedTime; private Date createTime; - + private String createBy; - + private String summary; + + private Status status; + + private boolean loadFlag; + + private long extractCount; + + private long sentCount; + + private long loadCount; + + private long reloadRowCount; + + private long otherRowCount; + + private long dataRowCount; + + private long dataInsertRowCount; + + private long dataUpdateRowCount; + + private long dataDeleteRowCount; + + private long oldDataRowCount = 0; + private long oldByteCount = 0; + private long oldFilterMillis = 0; + private long oldExtractMillis = 0; + private long oldLoadMillis = 0; + private long oldNetworkMillis = 0; + + private long loadId = -1; + + private boolean commonFlag; + + private long fallbackInsertCount; + + private long fallbackUpdateCount; + + private long ignoreRowCount; + + private long missingDeleteCount; + + private long skipCount; + + private long loadRowCount; + + private long loadInsertRowCount; + + private long loadUpdateRowCount; + + private long loadDeleteRowCount; - private transient Map tableCounts = new LinkedHashMap(); + private long extractRowCount; + + private long extractInsertRowCount; + + private long extractUpdateRowCount; + + private long extractDeleteRowCount; + private long failedDataId; + + private transient Map tableCounts = new LinkedHashMap(); + + public void resetStats() { + // save off old stats in case there + // is an error and we want to be able to + // restore the previous stats + + this.oldExtractMillis = this.extractMillis; + 
this.oldDataRowCount = this.dataRowCount; + this.oldByteCount = getByteCount(); + this.oldNetworkMillis = getNetworkMillis(); + this.oldFilterMillis = getFilterMillis(); + this.oldLoadMillis = getLoadMillis(); + + this.extractMillis = 0; + this.dataRowCount = 0; + setByteCount(0); + setNetworkMillis(0); + setFilterMillis(0); + setLoadMillis(0); + } + + public void revertStatsOnError() { + if (this.oldDataRowCount > 0) { + this.extractMillis = this.oldExtractMillis; + this.dataRowCount = this.oldDataRowCount; + setByteCount(this.oldByteCount); + setNetworkMillis(this.oldNetworkMillis); + setFilterMillis(this.oldFilterMillis); + setLoadMillis(this.oldLoadMillis); + } + } + public long getBatchId() { return batchId; } @@ -78,7 +193,7 @@ public long getBatchId() { public void setBatchId(long batchId) { this.batchId = batchId; } - + public String getNodeId() { return nodeId; } @@ -86,19 +201,19 @@ public String getNodeId() { public void setNodeId(String nodeId) { this.nodeId = nodeId; } - + public String getChannelId() { return channelId; } public void setChannelId(String channelId) { this.channelId = channelId; - } + } public String getNodeBatchId() { return nodeId + "-" + batchId; } - + public void setErrorFlag(boolean errorFlag) { this.errorFlag = errorFlag; } @@ -109,8 +224,8 @@ public boolean isErrorFlag() { public long getRouterMillis() { return routerMillis; - } - + } + public void setRouterMillis(long routerMillis) { this.routerMillis = routerMillis; } @@ -135,8 +250,32 @@ public long getLoadMillis() { return loadMillis; } - public void setLoadMillis(long databaseMillis) { - this.loadMillis = databaseMillis; + public void setLoadMillis(long loadMillis) { + this.loadMillis = loadMillis; + } + + public void setExtractMillis(long extractMillis) { + this.extractMillis = extractMillis; + } + + public long getExtractMillis() { + return extractMillis; + } + + public long getTransformExtractMillis() { + return transformExtractMillis; + } + + public void setTransformExtractMillis(long transformExtractMillis) { + this.transformExtractMillis = transformExtractMillis; + } + + public long getTransformLoadMillis() { + return transformLoadMillis; + } + + public void setTransformLoadMillis(long transformLoadMillis) { + this.transformLoadMillis = transformLoadMillis; } public long getByteCount() { @@ -145,28 +284,28 @@ public long getByteCount() { public void setByteCount(long byteCount) { this.byteCount = byteCount; - } - + } + public String getStagedLocation() { return Batch.getStagedLocation(false, getNodeId()); } - + public void incrementByteCount(int size) { this.byteCount += size; - } - + } + public void setIgnoreCount(long ignoreCount) { this.ignoreCount = ignoreCount; } - + public void incrementIgnoreCount() { this.ignoreCount++; } - + public long getIgnoreCount() { return ignoreCount; } - + public String getSqlState() { return sqlState; } @@ -214,47 +353,307 @@ public Date getCreateTime() { public void setCreateTime(Date createTime) { this.createTime = createTime; } - + public void setCreateBy(String createBy) { this.createBy = createBy; } - + public String getCreateBy() { return createBy; } - + public String getSummary() { if ((summary == null || summary.length() == 0) && tableCounts != null) { summary = buildBatchSummary(); } return summary; } - + public void setSummary(String summary) { this.summary = summary; } - + protected String buildBatchSummary() { final int SIZE = 254; StringBuilder buff = new StringBuilder(SIZE); - + for (Entry tableCount : tableCounts.entrySet()) { 
buff.append(tableCount.getKey()).append(", "); } - - if (buff.length() > 2) { - buff.setLength(buff.length()-2); + + if (buff.length() > 2) { + buff.setLength(buff.length() - 2); } - - return StringUtils.abbreviate(buff.toString(), SIZE); + + return StringUtils.abbreviate(buff.toString(), SIZE); } - + public void incrementTableCount(String tableName) { - Integer count = tableCounts.get(tableName); + Integer count = tableCounts.get(tableName); if (count == null) { count = Integer.valueOf(0); } tableCounts.put(tableName, ++count); summary = null; } + + public Status getStatus() { + return status; + } + + public void setStatus(Status status) { + this.status = status; + } + + public void setStatus(String status) { + try { + this.status = Status.valueOf(status); + } catch (IllegalArgumentException e) { + this.status = Status.XX; + } + } + + public void setLoadFlag(boolean loadFlag) { + this.loadFlag = loadFlag; + } + + public boolean isLoadFlag() { + return loadFlag; + } + + public void setExtractCount(long extractCount) { + this.extractCount = extractCount; + } + + public long getExtractCount() { + return extractCount; + } + + public void setSentCount(long sentCount) { + this.sentCount = sentCount; + } + + public long getSentCount() { + return sentCount; + } + + public void setLoadCount(long loadCount) { + this.loadCount = loadCount; + } + + public long getLoadCount() { + return loadCount; + } + + public void setReloadRowCount(long reloadRowCount) { + this.reloadRowCount = reloadRowCount; + } + + public long getReloadRowCount() { + return reloadRowCount; + } + + public void setOtherRowCount(long otherRowCount) { + this.otherRowCount = otherRowCount; + } + + public long getOtherRowCount() { + return otherRowCount; + } + + public void setDataUpdateRowCount(long dataUpdateRowCount) { + this.dataUpdateRowCount = dataUpdateRowCount; + } + + public long getDataUpdateRowCount() { + return dataUpdateRowCount; + } + + public void setDataDeleteRowCount(long dataDeleteRowCount) { + this.dataDeleteRowCount = dataDeleteRowCount; + } + + public long getDataDeleteRowCount() { + return dataDeleteRowCount; + } + + public void incrementEventCount(DataEventType type) { + switch (type) { + case RELOAD: + reloadRowCount++; + break; + case INSERT: + dataInsertRowCount++; + break; + case UPDATE: + dataUpdateRowCount++; + break; + case DELETE: + dataDeleteRowCount++; + break; + default: + otherRowCount++; + break; + } + } + + public void setDataInsertRowCount(long dataInsertRowCount) { + this.dataInsertRowCount = dataInsertRowCount; + } + + public long getDataInsertRowCount() { + return dataInsertRowCount; + } + + public long getDataRowCount() { + return dataRowCount; + } + + public void setDataRowCount(long dataRowCount) { + this.dataRowCount = dataRowCount; + } + + public void incrementDataRowCount() { + this.dataRowCount++; + } + + public void incrementDataInsertRowCount() { + this.dataInsertRowCount++; + } + + public long totalRowCount() { + return dataInsertRowCount + dataUpdateRowCount + dataDeleteRowCount + otherRowCount; + } + + public void setLoadId(long loadId) { + this.loadId = loadId; + } + + public long getLoadId() { + return loadId; + } + + public void setCommonFlag(boolean commonFlag) { + this.commonFlag = commonFlag; + } + + public boolean isCommonFlag() { + return commonFlag; + } + + public long getFallbackInsertCount() { + return fallbackInsertCount; + } + + public void setFallbackInsertCount(long fallbackInsertCount) { + this.fallbackInsertCount = fallbackInsertCount; + } + + public long 
getFallbackUpdateCount() { + return fallbackUpdateCount; + } + + public void setFallbackUpdateCount(long fallbackUpdateCount) { + this.fallbackUpdateCount = fallbackUpdateCount; + } + + public long getMissingDeleteCount() { + return missingDeleteCount; + } + + public void setMissingDeleteCount(long missingDeleteCount) { + this.missingDeleteCount = missingDeleteCount; + } + + public void setSkipCount(long skipCount) { + this.skipCount = skipCount; + } + + public long getSkipCount() { + return skipCount; + } + + public long getIgnoreRowCount() { + return ignoreRowCount; + } + + public void incrementIgnoreRowCount() { + this.ignoreRowCount++; + } + + public void setIgnoreRowCount(long ignoreRowCount) { + this.ignoreRowCount = ignoreRowCount; + } + + public long getLoadRowCount() { + return loadRowCount; + } + + public void setLoadRowCount(long loadRowCount) { + this.loadRowCount = loadRowCount; + } + + public long getLoadInsertRowCount() { + return loadInsertRowCount; + } + + public void setLoadInsertRowCount(long loadInsertRowCount) { + this.loadInsertRowCount = loadInsertRowCount; + } + + public long getLoadUpdateRowCount() { + return loadUpdateRowCount; + } + + public void setLoadUpdateRowCount(long loadUpdateRowCount) { + this.loadUpdateRowCount = loadUpdateRowCount; + } + + public long getLoadDeleteRowCount() { + return loadDeleteRowCount; + } + + public void setLoadDeleteRowCount(long loadDeleteRowCount) { + this.loadDeleteRowCount = loadDeleteRowCount; + } + + public long getExtractRowCount() { + return extractRowCount; + } + + public void setExtractRowCount(long extractRowCount) { + this.extractRowCount = extractRowCount; + } + + public long getExtractInsertRowCount() { + return extractInsertRowCount; + } + + public void setExtractInsertRowCount(long extractInsertRowCount) { + this.extractInsertRowCount = extractInsertRowCount; + } + + public long getExtractUpdateRowCount() { + return extractUpdateRowCount; + } + + public void setExtractUpdateRowCount(long extractUpdateRowCount) { + this.extractUpdateRowCount = extractUpdateRowCount; + } + + public long getExtractDeleteRowCount() { + return extractDeleteRowCount; + } + + public void setExtractDeleteRowCount(long extractDeleteRowCount) { + this.extractDeleteRowCount = extractDeleteRowCount; + } + + public long getFailedDataId() { + return failedDataId; + } + + public void setFailedDataId(long failedDataId) { + this.failedDataId = failedDataId; + } } diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/BatchAck.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/BatchAck.java index b0233fec11..f9f5956074 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/BatchAck.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/BatchAck.java @@ -1,170 +1,170 @@ -/** - * Licensed to JumpMind Inc under one or more contributor - * license agreements. See the NOTICE file distributed - * with this work for additional information regarding - * copyright ownership. JumpMind Inc licenses this file - * to you under the GNU General Public License, version 3.0 (GPLv3) - * (the "License"); you may not use this file except in compliance - * with the License. - * - * You should have received a copy of the GNU General Public License, - * version 3.0 (GPLv3) along with this library; if not, see - * . 
- * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.jumpmind.symmetric.model; +/** + * Licensed to JumpMind Inc under one or more contributor + * license agreements. See the NOTICE file distributed + * with this work for additional information regarding + * copyright ownership. JumpMind Inc licenses this file + * to you under the GNU General Public License, version 3.0 (GPLv3) + * (the "License"); you may not use this file except in compliance + * with the License. + * + * You should have received a copy of the GNU General Public License, + * version 3.0 (GPLv3) along with this library; if not, see + * . + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.jumpmind.symmetric.model; import java.io.Serializable; - + /** * Status of a batch acknowledgment */ public class BatchAck implements Serializable { - private static final long serialVersionUID = 1L; - - private long batchId; - - /** - * The node id of the node that successfully loaded the batch. - */ - private String nodeId; - - private boolean isOk; - - private boolean isResend; - - private long errorLine; - - private long networkMillis; - - private long filterMillis; - - private long databaseMillis; - - private long startTime; - - private long byteCount; - - private String sqlState; - + private static final long serialVersionUID = 1L; + + private long batchId; + + /** + * The node id of the node that successfully loaded the batch. 
+ */ + private String nodeId; + + private boolean isOk; + + private boolean isResend; + + private long errorLine; + + private long networkMillis; + + private long filterMillis; + + private long loadMillis; + + private long startTime; + + private long byteCount; + + private String sqlState; + private int sqlCode; - private boolean ignored = false; - - private String sqlMessage; - - public BatchAck(long batchId) { - this.batchId = batchId; - isOk = true; - } - - public BatchAck(long batchId, long errorLineNumber) { - this.batchId = batchId; - isOk = false; - errorLine = errorLineNumber; - } - - public long getBatchId() { - return batchId; - } - - public long getErrorLine() { - return errorLine; - } - - public boolean isOk() { - return isOk; - } - - public boolean isResend() { - return isResend; - } - - public void setBatchId(long batchId) { - this.batchId = batchId; - } - - public void setErrorLine(long errorLine) { - this.errorLine = errorLine; - } - - public void setOk(boolean isOk) { - this.isOk = isOk; - } - - public void setResend(boolean isResend) { - this.isResend = isResend; - } - - public long getByteCount() { - return byteCount; - } - - public void setByteCount(long byteCount) { - this.byteCount = byteCount; - } - - public long getDatabaseMillis() { - return databaseMillis; - } - - public void setDatabaseMillis(long databaseMillis) { - this.databaseMillis = databaseMillis; - } - - public long getFilterMillis() { - return filterMillis; - } - - public void setFilterMillis(long filterMillis) { - this.filterMillis = filterMillis; - } - - public long getNetworkMillis() { - return networkMillis; - } - - public void setNetworkMillis(long networkMillis) { - this.networkMillis = networkMillis; - } - - public int getSqlCode() { - return sqlCode; - } - - public void setSqlCode(int sqlCode) { - this.sqlCode = sqlCode; - } - - public String getSqlMessage() { - return sqlMessage; - } - - public void setSqlMessage(String sqlMessage) { - this.sqlMessage = sqlMessage; - } - - public String getSqlState() { - return sqlState; - } - - public void setSqlState(String sqlState) { - this.sqlState = sqlState; - } - - public String getNodeId() { - return nodeId; - } - - public void setNodeId(String nodeId) { - this.nodeId = nodeId; + private boolean ignored = false; + + private String sqlMessage; + + public BatchAck(long batchId) { + this.batchId = batchId; + isOk = true; + } + + public BatchAck(long batchId, long errorLineNumber) { + this.batchId = batchId; + isOk = false; + errorLine = errorLineNumber; + } + + public long getBatchId() { + return batchId; + } + + public long getErrorLine() { + return errorLine; + } + + public boolean isOk() { + return isOk; + } + + public boolean isResend() { + return isResend; + } + + public void setBatchId(long batchId) { + this.batchId = batchId; + } + + public void setErrorLine(long errorLine) { + this.errorLine = errorLine; + } + + public void setOk(boolean isOk) { + this.isOk = isOk; + } + + public void setResend(boolean isResend) { + this.isResend = isResend; + } + + public long getByteCount() { + return byteCount; + } + + public void setByteCount(long byteCount) { + this.byteCount = byteCount; + } + + public long getLoadMillis() { + return loadMillis; + } + + public void setLoadMillis(long loadMillis) { + this.loadMillis = loadMillis; + } + + public long getFilterMillis() { + return filterMillis; + } + + public void setFilterMillis(long filterMillis) { + this.filterMillis = filterMillis; + } + + public long getNetworkMillis() { + return networkMillis; + } + + 
public void setNetworkMillis(long networkMillis) { + this.networkMillis = networkMillis; + } + + public int getSqlCode() { + return sqlCode; + } + + public void setSqlCode(int sqlCode) { + this.sqlCode = sqlCode; + } + + public String getSqlMessage() { + return sqlMessage; + } + + public void setSqlMessage(String sqlMessage) { + this.sqlMessage = sqlMessage; + } + + public String getSqlState() { + return sqlState; + } + + public void setSqlState(String sqlState) { + this.sqlState = sqlState; + } + + public String getNodeId() { + return nodeId; + } + + public void setNodeId(String nodeId) { + this.nodeId = nodeId; } public void setIgnored(boolean ignored) { @@ -173,16 +173,16 @@ public void setIgnored(boolean ignored) { public boolean isIgnored() { return ignored; - } - - public long getStartTime() { - return startTime; - } - - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - - + } + + public long getStartTime() { + return startTime; + } + + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + + } \ No newline at end of file diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/IncomingBatch.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/IncomingBatch.java index 1c65994cb7..f6e32b50e5 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/IncomingBatch.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/IncomingBatch.java @@ -32,40 +32,9 @@ public class IncomingBatch extends AbstractBatch { private static final long serialVersionUID = 1L; - public enum Status { - OK("Ok"), ER("Error"), LD("Loading"), RS("Resend"), IG("Ignored"), XX("Unknown"); - - private String description; - - Status(String desc) { - this.description = desc; - } - - @Override - public String toString() { - return description; - } - } - - private Status status; - - private long databaseMillis; - - private long statementCount; - - private long fallbackInsertCount; - - private long fallbackUpdateCount; - - private long ignoreRowCount; - - private long missingDeleteCount; - - private long skipCount; - private long failedRowNumber; - - private long failedLineNumber; + + private long failedLineNumber; private long startTime; @@ -78,61 +47,41 @@ public IncomingBatch(Batch batch) { setBatchId(batch.getBatchId()); setNodeId(batch.getSourceNodeId()); setChannelId(batch.getChannelId()); - this.status = Status.LD; + setStatus(Status.LD); } - public void setValues(Statistics readerStatistics, Statistics writerStatistics, - boolean isSuccess) { + public void setValues(Statistics readerStatistics, Statistics writerStatistics, boolean isSuccess) { if (readerStatistics != null) { setByteCount(readerStatistics.get(DataReaderStatistics.READ_BYTE_COUNT)); } if (writerStatistics != null) { setFilterMillis(writerStatistics.get(DataWriterStatisticConstants.FILTERMILLIS)); - databaseMillis = writerStatistics.get(DataWriterStatisticConstants.DATABASEMILLIS); - statementCount = writerStatistics.get(DataWriterStatisticConstants.STATEMENTCOUNT); - fallbackInsertCount = writerStatistics - .get(DataWriterStatisticConstants.FALLBACKINSERTCOUNT); - fallbackUpdateCount = writerStatistics - .get(DataWriterStatisticConstants.FALLBACKUPDATECOUNT); - missingDeleteCount = writerStatistics - .get(DataWriterStatisticConstants.MISSINGDELETECOUNT); + setLoadMillis(writerStatistics.get(DataWriterStatisticConstants.LOADMILLIS)); + setLoadRowCount(writerStatistics.get(DataWriterStatisticConstants.STATEMENTCOUNT)); + 
setFallbackInsertCount(writerStatistics.get(DataWriterStatisticConstants.FALLBACKINSERTCOUNT)); + setFallbackUpdateCount(writerStatistics.get(DataWriterStatisticConstants.FALLBACKUPDATECOUNT)); + setMissingDeleteCount(writerStatistics.get(DataWriterStatisticConstants.MISSINGDELETECOUNT)); setIgnoreCount(writerStatistics.get(DataWriterStatisticConstants.IGNORECOUNT)); - ignoreRowCount = writerStatistics.get(DataWriterStatisticConstants.IGNOREROWCOUNT); + setIgnoreRowCount(writerStatistics.get(DataWriterStatisticConstants.IGNOREROWCOUNT)); setStartTime(writerStatistics.get(DataWriterStatisticConstants.STARTTIME)); setLastUpdatedTime(new Date()); if (!isSuccess) { - failedRowNumber = statementCount; + failedRowNumber = getLoadRowCount(); failedLineNumber = writerStatistics.get(DataWriterStatisticConstants.LINENUMBER); } } } - + public void setNodeBatchId(String value) { if (value != null) { int splitIndex = value.indexOf("-"); if (splitIndex > 0) { setNodeId(value.substring(0, splitIndex)); - setBatchId(Long.parseLong(value.substring(splitIndex+1))); + setBatchId(Long.parseLong(value.substring(splitIndex + 1))); } } } - public Status getStatus() { - return status; - } - - public void setStatus(Status status) { - this.status = status; - } - - public void setStatus(String status) { - try { - this.status = Status.valueOf(status); - } catch (IllegalArgumentException e) { - this.status = Status.XX; - } - } - public boolean isRetry() { return retry; } @@ -141,54 +90,6 @@ public void setRetry(boolean isRetry) { this.retry = isRetry; } - public long getDatabaseMillis() { - return databaseMillis; - } - - public void setDatabaseMillis(long databaseMillis) { - this.databaseMillis = databaseMillis; - } - - public long getStatementCount() { - return statementCount; - } - - public void setStatementCount(long statementCount) { - this.statementCount = statementCount; - } - - public long getFallbackInsertCount() { - return fallbackInsertCount; - } - - public void setFallbackInsertCount(long fallbackInsertCount) { - this.fallbackInsertCount = fallbackInsertCount; - } - - public long getFallbackUpdateCount() { - return fallbackUpdateCount; - } - - public void setFallbackUpdateCount(long fallbackUpdateCount) { - this.fallbackUpdateCount = fallbackUpdateCount; - } - - public long getMissingDeleteCount() { - return missingDeleteCount; - } - - public void setMissingDeleteCount(long missingDeleteCount) { - this.missingDeleteCount = missingDeleteCount; - } - - public void setSkipCount(long skipCount) { - this.skipCount = skipCount; - } - - public long getSkipCount() { - return skipCount; - } - public long getFailedRowNumber() { return failedRowNumber; } @@ -218,22 +119,10 @@ public boolean isPersistable() { public void setFailedLineNumber(long failedLineNumber) { this.failedLineNumber = failedLineNumber; } - + public long getFailedLineNumber() { return failedLineNumber; } - - public long getIgnoreRowCount() { - return ignoreRowCount; - } - - public void incrementIgnoreRowCount() { - this.ignoreRowCount++; - } - - public void setIgnoreRowCount(long ignoreRowCount) { - this.ignoreRowCount = ignoreRowCount; - } @Override public String toString() { @@ -248,7 +137,7 @@ public boolean equals(Object o) { IncomingBatch b = (IncomingBatch) o; return getBatchId() == b.getBatchId() && StringUtils.equals(getNodeId(), b.getNodeId()); } - + @Override public int hashCode() { return (String.valueOf(getBatchId()) + "-" + getNodeId()).hashCode(); diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/OutgoingBatch.java 
b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/OutgoingBatch.java index 4b372f0ede..c7f83465fd 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/OutgoingBatch.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/OutgoingBatch.java @@ -23,7 +23,6 @@ import java.util.Date; import org.jumpmind.symmetric.io.data.Batch; -import org.jumpmind.symmetric.io.data.DataEventType; /** * Used for tracking the sending a collection of data to a node in the system. A @@ -37,247 +36,25 @@ public class OutgoingBatch extends AbstractBatch { private static final long serialVersionUID = 1L; - public enum Status { - OK("Ok"), ER("Error"), RQ("Request"), NE("New"), QY("Querying"), SE("Sending"), LD("Loading"), RT("Routing"), IG("Ignored"), - RS("Resend"), XX("Unknown"); - - private String description; - - Status(String description) { - this.description = description; - } - - @Override - public String toString() { - return description; - } - } - - private long loadId = -1; - - private Status status = Status.RT; - - private boolean loadFlag; - private boolean extractJobFlag; - - private boolean commonFlag; - - private long extractMillis; - - private long sentCount; - - private long extractCount; - - private long loadCount; - - private long dataEventCount; - private long reloadEventCount; - - private long insertEventCount; - - private long updateEventCount; - - private long deleteEventCount; - - private long otherEventCount; - - private long failedDataId; - private Date extractStartTime; - + private Date transferStartTime; - + private Date loadStartTime; - - private long oldDataEventCount = 0; - private long oldByteCount = 0; - private long oldFilterMillis = 0; - private long oldExtractMillis = 0; - private long oldLoadMillis = 0; - private long oldNetworkMillis = 0; - + public OutgoingBatch() { + setStatus(Status.RT); } public OutgoingBatch(String nodeId, String channelId, Status status) { setNodeId(nodeId); setChannelId(channelId); - this.status = status; + setStatus(status); setCreateTime(new Date()); } - public void resetStats() { - // save off old stats in case there - // is an error and we want to be able to - // restore the previous stats - this.oldExtractMillis = this.extractMillis; - this.oldDataEventCount = this.dataEventCount; - this.oldByteCount = getByteCount(); - this.oldNetworkMillis = getNetworkMillis(); - this.oldFilterMillis = getFilterMillis(); - this.oldLoadMillis = getLoadMillis(); - - this.extractMillis = 0; - this.dataEventCount = 0; - setByteCount(0); - setNetworkMillis(0); - setFilterMillis(0); - setLoadMillis(0); - } - - public void revertStatsOnError() { - if (this.oldDataEventCount > 0) { - this.extractMillis = this.oldExtractMillis; - this.dataEventCount = this.oldDataEventCount; - setByteCount(this.oldByteCount); - setNetworkMillis(this.oldNetworkMillis); - setFilterMillis(this.oldFilterMillis); - setLoadMillis(this.oldLoadMillis); - } - } - - public void setLoadFlag(boolean loadFlag) { - this.loadFlag = loadFlag; - } - - public boolean isLoadFlag() { - return loadFlag; - } - - public void setSentCount(long sentCount) { - this.sentCount = sentCount; - } - - public long getSentCount() { - return sentCount; - } - - public void setExtractCount(long extractCount) { - this.extractCount = extractCount; - } - - public long getExtractCount() { - return extractCount; - } - - public void setLoadCount(long loadCount) { - this.loadCount = loadCount; - } - - public long getLoadCount() { - return loadCount; - } - - public Status getStatus() { - return 
status; - } - - public void setStatus(Status status) { - this.status = status; - } - - public void setStatus(String status) { - try { - this.status = Status.valueOf(status); - } catch (IllegalArgumentException e) { - this.status = Status.XX; - } - } - - public void setUpdateEventCount(long updateEventCount) { - this.updateEventCount = updateEventCount; - } - - public long getUpdateEventCount() { - return updateEventCount; - } - - public void setDeleteEventCount(long deleteEventCount) { - this.deleteEventCount = deleteEventCount; - } - - public long getDeleteEventCount() { - return deleteEventCount; - } - - public void incrementEventCount(DataEventType type) { - switch (type) { - case RELOAD: - reloadEventCount++; - break; - case INSERT: - insertEventCount++; - break; - case UPDATE: - updateEventCount++; - break; - case DELETE: - deleteEventCount++; - break; - default: - otherEventCount++; - break; - } - } - - public void setInsertEventCount(long insertEventCount) { - this.insertEventCount = insertEventCount; - } - - public long getInsertEventCount() { - return insertEventCount; - } - - public void setOtherEventCount(long otherEventCount) { - this.otherEventCount = otherEventCount; - } - - public long getOtherEventCount() { - return otherEventCount; - } - - public void setReloadEventCount(long reloadEventCount) { - this.reloadEventCount = reloadEventCount; - } - - public long getReloadEventCount() { - return reloadEventCount; - } - - public long getDataEventCount() { - return dataEventCount; - } - - public void setDataEventCount(long dataEventCount) { - this.dataEventCount = dataEventCount; - } - - public void setExtractMillis(long extractMillis) { - this.extractMillis = extractMillis; - } - - public long getExtractMillis() { - return extractMillis; - } - - public void incrementDataEventCount() { - this.dataEventCount++; - } - - public void incrementInsertEventCount() { - this.insertEventCount++; - } - - public long getFailedDataId() { - return failedDataId; - } - - public void setFailedDataId(long failedDataId) { - this.failedDataId = failedDataId; - } - @Override public Date getLastUpdatedTime() { if (super.getLastUpdatedTime() == null) { @@ -285,47 +62,26 @@ public Date getLastUpdatedTime() { } else { return super.getLastUpdatedTime(); } - } - - public long totalEventCount() { - return insertEventCount + updateEventCount + deleteEventCount + otherEventCount; - } - - public void setCommonFlag(boolean commonFlag) { - this.commonFlag = commonFlag; - } - - public boolean isCommonFlag() { - return commonFlag; } - + @Override public String getStagedLocation() { - return Batch.getStagedLocation(commonFlag, getNodeId()); + return Batch.getStagedLocation(isCommonFlag(), getNodeId()); } - + @Override public String toString() { return getNodeBatchId(); } - - public void setLoadId(long loadId) { - this.loadId = loadId; - } - public long getLoadId() { - return loadId; - } - public void setExtractJobFlag(boolean extractJobFlag) { this.extractJobFlag = extractJobFlag; } - + public boolean isExtractJobFlag() { return extractJobFlag; } - public Date getExtractStartTime() { return extractStartTime; } diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/OutgoingBatchWithPayload.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/OutgoingBatchWithPayload.java index f0c979516b..1a98e7bb9c 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/OutgoingBatchWithPayload.java +++ 
b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/OutgoingBatchWithPayload.java @@ -1,89 +1,89 @@ -/** - * Licensed to JumpMind Inc under one or more contributor - * license agreements. See the NOTICE file distributed - * with this work for additional information regarding - * copyright ownership. JumpMind Inc licenses this file - * to you under the GNU General Public License, version 3.0 (GPLv3) - * (the "License"); you may not use this file except in compliance - * with the License. - * - * You should have received a copy of the GNU General Public License, - * version 3.0 (GPLv3) along with this library; if not, see - * . - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.jumpmind.symmetric.model; - -import java.io.Serializable; -import java.util.List; - -import org.jumpmind.symmetric.io.data.writer.StructureDataWriter.PayloadType; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; - -public class OutgoingBatchWithPayload implements Serializable { - - private static final long serialVersionUID = 1L; - - private List payload; - - private PayloadType payloadType; - - private Status status; - - private long batchId; - - private String channelId; - - public OutgoingBatchWithPayload(OutgoingBatch batch, PayloadType payloadType) { - this.status = batch.getStatus(); - this.batchId = batch.getBatchId(); - this.channelId = batch.getChannelId(); - this.payloadType = payloadType; - } - - public PayloadType getPayloadType() { - return payloadType; - } - - public void setPayloadType(PayloadType payloadType) { - this.payloadType = payloadType; - } - - public List getPayload() { - return payload; - } - - public void setPayload(List payload) { - this.payload = payload; - } - - public Status getStatus() { - return status; - } - - public void setStatus(Status status) { - this.status = status; - } - - public long getBatchId() { - return batchId; - } - - public void setBatchId(long batchId) { - this.batchId = batchId; - } - - public void setChannelId(String channelId) { - this.channelId = channelId; - } - - public String getChannelId() { - return channelId; - } -} +/** + * Licensed to JumpMind Inc under one or more contributor + * license agreements. See the NOTICE file distributed + * with this work for additional information regarding + * copyright ownership. JumpMind Inc licenses this file + * to you under the GNU General Public License, version 3.0 (GPLv3) + * (the "License"); you may not use this file except in compliance + * with the License. + * + * You should have received a copy of the GNU General Public License, + * version 3.0 (GPLv3) along with this library; if not, see + * . + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.jumpmind.symmetric.model; + +import java.io.Serializable; +import java.util.List; + +import org.jumpmind.symmetric.io.data.writer.StructureDataWriter.PayloadType; +import org.jumpmind.symmetric.model.AbstractBatch.Status; + +public class OutgoingBatchWithPayload implements Serializable { + + private static final long serialVersionUID = 1L; + + private List payload; + + private PayloadType payloadType; + + private Status status; + + private long batchId; + + private String channelId; + + public OutgoingBatchWithPayload(OutgoingBatch batch, PayloadType payloadType) { + this.status = batch.getStatus(); + this.batchId = batch.getBatchId(); + this.channelId = batch.getChannelId(); + this.payloadType = payloadType; + } + + public PayloadType getPayloadType() { + return payloadType; + } + + public void setPayloadType(PayloadType payloadType) { + this.payloadType = payloadType; + } + + public List getPayload() { + return payload; + } + + public void setPayload(List payload) { + this.payload = payload; + } + + public Status getStatus() { + return status; + } + + public void setStatus(Status status) { + this.status = status; + } + + public long getBatchId() { + return batchId; + } + + public void setBatchId(long batchId) { + this.batchId = batchId; + } + + public void setChannelId(String channelId) { + this.channelId = channelId; + } + + public String getChannelId() { + return channelId; + } +} diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/RemoteNodeStatus.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/RemoteNodeStatus.java index aa202faf75..771910d0dd 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/model/RemoteNodeStatus.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/model/RemoteNodeStatus.java @@ -96,7 +96,7 @@ public void updateIncomingStatus(List incomingBatches) { if (incomingBatches != null) { for (IncomingBatch incomingBatch : incomingBatches) { if (incomingBatch.getIgnoreCount() == 0) { - dataProcessed += incomingBatch.getStatementCount(); + dataProcessed += incomingBatch.getLoadRowCount(); } batchesProcessed++; if (incomingBatch.getStatus() == org.jumpmind.symmetric.model.IncomingBatch.Status.ER) { @@ -123,7 +123,7 @@ public void updateOutgoingStatus(List outgoingBatches, List= dataMetaData.getNodeChannel().getMaxBatchSize() + return batch.getDataRowCount() >= dataMetaData.getNodeChannel().getMaxBatchSize() && routingContext.isEncountedTransactionBoundary(); } diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/route/NonTransactionalBatchAlgorithm.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/route/NonTransactionalBatchAlgorithm.java index 7381a3023c..02d418887a 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/route/NonTransactionalBatchAlgorithm.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/route/NonTransactionalBatchAlgorithm.java @@ -34,7 +34,7 @@ public class NonTransactionalBatchAlgorithm implements IBatchAlgorithm, IBuiltIn public static final String NAME = "nontransactional"; public boolean isBatchComplete(OutgoingBatch batch, DataMetaData dataMetaData, SimpleRouterContext routingContext) { - return batch.getDataEventCount() >= dataMetaData.getNodeChannel().getMaxBatchSize(); + return batch.getDataRowCount() >= dataMetaData.getNodeChannel().getMaxBatchSize(); } } \ No newline at end of file diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/IDataService.java 
b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/IDataService.java index 3fa46e6e3e..0ee32a55f8 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/IDataService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/IDataService.java @@ -1,170 +1,170 @@ -/** - * Licensed to JumpMind Inc under one or more contributor - * license agreements. See the NOTICE file distributed - * with this work for additional information regarding - * copyright ownership. JumpMind Inc licenses this file - * to you under the GNU General Public License, version 3.0 (GPLv3) - * (the "License"); you may not use this file except in compliance - * with the License. - * - * You should have received a copy of the GNU General Public License, - * version 3.0 (GPLv3) along with this library; if not, see - * . - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.jumpmind.symmetric.service; - -import java.util.Date; -import java.util.List; -import java.util.Map; - -import org.jumpmind.db.sql.ISqlReadCursor; -import org.jumpmind.db.sql.ISqlTransaction; -import org.jumpmind.db.sql.Row; -import org.jumpmind.symmetric.ext.IHeartbeatListener; -import org.jumpmind.symmetric.io.data.Batch; -import org.jumpmind.symmetric.model.Data; -import org.jumpmind.symmetric.model.DataEvent; -import org.jumpmind.symmetric.model.DataGap; -import org.jumpmind.symmetric.model.Node; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; -import org.jumpmind.symmetric.model.ProcessInfo; -import org.jumpmind.symmetric.model.TableReloadRequest; -import org.jumpmind.symmetric.model.TableReloadRequestKey; -import org.jumpmind.symmetric.model.TriggerHistory; -import org.jumpmind.symmetric.model.TriggerRouter; - -/** - * This service provides an API to access and update {@link Data}. 
- */ -public interface IDataService { - - public void insertTableReloadRequest(TableReloadRequest request); - - public TableReloadRequest getTableReloadRequest(TableReloadRequestKey key); - - public List getTableReloadRequestToProcess(final String sourceNodeId); - - public String reloadNode(String nodeId, boolean reverseLoad, String createBy); - - public String reloadTable(String nodeId, String catalogName, String schemaName, String tableName); - - public String reloadTable(String nodeId, String catalogName, String schemaName, String tableName, String overrideInitialLoadSelect); - - public void reloadMissingForeignKeyRows(String nodeId, long dataId); - - /** - * Sends a SQL command to the remote node for execution by creating a SQL event that is synced like other data - * - * @param nodeId the remote node where the SQL statement will be executed - * @param catalogName used to find the sym_trigger entry for table that will be associated with this event - * @param schemaName used to find the sym_trigger entry for table that will be associated with this event - * @param tableName used to find the sym_trigger entry for table that will be associated with this event - * @param sql the SQL statement to run on the remote node database - * @return message string indicating success or error - */ - public String sendSQL(String nodeId, String catalogName, String schemaName, String tableName, String sql); - - public void insertReloadEvents(Node targetNode, boolean reverse, ProcessInfo processInfo); - - public void insertReloadEvents(Node targetNode, boolean reverse, List reloadRequests, ProcessInfo processInfo); - - public boolean insertReloadEvent(TableReloadRequest request, boolean deleteAtClient); - - public long insertReloadEvent(ISqlTransaction transaction, Node targetNode, - TriggerRouter triggerRouter, TriggerHistory triggerHistory, String overrideInitialLoadSelect, boolean isLoad, long loadId, String createBy, Status status); - - public void sendScript(String nodeId, String script, boolean isLoad); - - public boolean sendSchema(String nodeId, String catalogName, String schemaName, - String tableName, boolean isLoad); - - /** - * Update {@link Node} information for this node and call {@link IHeartbeatListener}s. 
- */ - public void heartbeat(boolean force); - - public void insertHeartbeatEvent(Node node, boolean isReload); - - public long insertData(Data data); - - public void insertDataEvents(ISqlTransaction transaction, List events); - - public void insertDataAndDataEventAndOutgoingBatch(Data data, String channelId, List nodes, String routerId, boolean isLoad, long loadId, String createBy); - - public long insertDataAndDataEventAndOutgoingBatch(ISqlTransaction transaction, Data data, - String nodeId, String routerId, boolean isLoad, long loadId, String createBy, Status status); - - public long insertDataAndDataEventAndOutgoingBatch(Data data, String nodeId, String routerId, boolean isLoad, long loadId, String createBy); - - public void insertSqlEvent(ISqlTransaction transaction, Node targetNode, String sql, boolean isLoad, long loadId, String createBy); - - public void insertSqlEvent(Node targetNode, String sql, boolean isLoad, long loadId, String createBy); - - public void insertScriptEvent(ISqlTransaction transaction, String channelId, - Node targetNode, String script, boolean isLoad, long loadId, String createBy); - - public void insertCreateEvent(Node targetNode, TriggerHistory triggerHistory, String routerId, boolean isLoad, long loadId, String createBy); - - /** - * Count the number of data ids in a range - */ - public int countDataInRange(long firstDataId, long secondDataId); - - public long countDataGapsByStatus(DataGap.Status status); - - public List findDataGapsByStatus(DataGap.Status status); - - public List findDataGaps(); - - public Date findCreateTimeOfEvent(long dataId); - - public Date findCreateTimeOfData(long dataId); - - public Date findNextCreateTimeOfDataStartingAt(long dataId); - - public Data createData(String catalogName, String schemaName, String tableName); - - public Data createData(String catalogName, String schemaName, String tableName, String whereClause); - - public Data createData(ISqlTransaction transaction, String catalogName, String schemaName, String tableName, String whereClause); - - public Data mapData(Row row); - - public List listDataIds(long batchId, String nodeId); - - public List listData(long batchId, String nodeId, long startDataId, String channelId, int maxRowsToRetrieve); - - public void updateDataGap(DataGap gap, DataGap.Status status); - - public void insertDataGap(DataGap gap); - - public void insertDataGap(ISqlTransaction transaction, DataGap gap); - - public void deleteDataGap(ISqlTransaction transaction, DataGap gap); - - public void deleteAllDataGaps(ISqlTransaction transaction); - - public void deleteDataGap(DataGap gap); - - public void deleteCapturedConfigChannelData(); - - public long findMaxDataId(); - - public Data findData(long dataId); - - public long findMinDataId(); - - public ISqlReadCursor selectDataFor(Batch batch); - - public ISqlReadCursor selectDataFor(Long batchId, String channelId); - - public Map getLastDataCaptureByChannel(); - +/** + * Licensed to JumpMind Inc under one or more contributor + * license agreements. See the NOTICE file distributed + * with this work for additional information regarding + * copyright ownership. JumpMind Inc licenses this file + * to you under the GNU General Public License, version 3.0 (GPLv3) + * (the "License"); you may not use this file except in compliance + * with the License. + * + * You should have received a copy of the GNU General Public License, + * version 3.0 (GPLv3) along with this library; if not, see + * . 
+ * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.jumpmind.symmetric.service; + +import java.util.Date; +import java.util.List; +import java.util.Map; + +import org.jumpmind.db.sql.ISqlReadCursor; +import org.jumpmind.db.sql.ISqlTransaction; +import org.jumpmind.db.sql.Row; +import org.jumpmind.symmetric.ext.IHeartbeatListener; +import org.jumpmind.symmetric.io.data.Batch; +import org.jumpmind.symmetric.model.Data; +import org.jumpmind.symmetric.model.DataEvent; +import org.jumpmind.symmetric.model.DataGap; +import org.jumpmind.symmetric.model.Node; +import org.jumpmind.symmetric.model.AbstractBatch.Status; +import org.jumpmind.symmetric.model.ProcessInfo; +import org.jumpmind.symmetric.model.TableReloadRequest; +import org.jumpmind.symmetric.model.TableReloadRequestKey; +import org.jumpmind.symmetric.model.TriggerHistory; +import org.jumpmind.symmetric.model.TriggerRouter; + +/** + * This service provides an API to access and update {@link Data}. + */ +public interface IDataService { + + public void insertTableReloadRequest(TableReloadRequest request); + + public TableReloadRequest getTableReloadRequest(TableReloadRequestKey key); + + public List getTableReloadRequestToProcess(final String sourceNodeId); + + public String reloadNode(String nodeId, boolean reverseLoad, String createBy); + + public String reloadTable(String nodeId, String catalogName, String schemaName, String tableName); + + public String reloadTable(String nodeId, String catalogName, String schemaName, String tableName, String overrideInitialLoadSelect); + + public void reloadMissingForeignKeyRows(String nodeId, long dataId); + + /** + * Sends a SQL command to the remote node for execution by creating a SQL event that is synced like other data + * + * @param nodeId the remote node where the SQL statement will be executed + * @param catalogName used to find the sym_trigger entry for table that will be associated with this event + * @param schemaName used to find the sym_trigger entry for table that will be associated with this event + * @param tableName used to find the sym_trigger entry for table that will be associated with this event + * @param sql the SQL statement to run on the remote node database + * @return message string indicating success or error + */ + public String sendSQL(String nodeId, String catalogName, String schemaName, String tableName, String sql); + + public void insertReloadEvents(Node targetNode, boolean reverse, ProcessInfo processInfo); + + public void insertReloadEvents(Node targetNode, boolean reverse, List reloadRequests, ProcessInfo processInfo); + + public boolean insertReloadEvent(TableReloadRequest request, boolean deleteAtClient); + + public long insertReloadEvent(ISqlTransaction transaction, Node targetNode, + TriggerRouter triggerRouter, TriggerHistory triggerHistory, String overrideInitialLoadSelect, boolean isLoad, long loadId, String createBy, Status status); + + public void sendScript(String nodeId, String script, boolean isLoad); + + public boolean sendSchema(String nodeId, String catalogName, String schemaName, + String tableName, boolean isLoad); + + /** + * Update {@link Node} information for this node and call {@link IHeartbeatListener}s. 
+ */ + public void heartbeat(boolean force); + + public void insertHeartbeatEvent(Node node, boolean isReload); + + public long insertData(Data data); + + public void insertDataEvents(ISqlTransaction transaction, List events); + + public void insertDataAndDataEventAndOutgoingBatch(Data data, String channelId, List nodes, String routerId, boolean isLoad, long loadId, String createBy); + + public long insertDataAndDataEventAndOutgoingBatch(ISqlTransaction transaction, Data data, + String nodeId, String routerId, boolean isLoad, long loadId, String createBy, Status status); + + public long insertDataAndDataEventAndOutgoingBatch(Data data, String nodeId, String routerId, boolean isLoad, long loadId, String createBy); + + public void insertSqlEvent(ISqlTransaction transaction, Node targetNode, String sql, boolean isLoad, long loadId, String createBy); + + public void insertSqlEvent(Node targetNode, String sql, boolean isLoad, long loadId, String createBy); + + public void insertScriptEvent(ISqlTransaction transaction, String channelId, + Node targetNode, String script, boolean isLoad, long loadId, String createBy); + + public void insertCreateEvent(Node targetNode, TriggerHistory triggerHistory, String routerId, boolean isLoad, long loadId, String createBy); + + /** + * Count the number of data ids in a range + */ + public int countDataInRange(long firstDataId, long secondDataId); + + public long countDataGapsByStatus(DataGap.Status status); + + public List findDataGapsByStatus(DataGap.Status status); + + public List findDataGaps(); + + public Date findCreateTimeOfEvent(long dataId); + + public Date findCreateTimeOfData(long dataId); + + public Date findNextCreateTimeOfDataStartingAt(long dataId); + + public Data createData(String catalogName, String schemaName, String tableName); + + public Data createData(String catalogName, String schemaName, String tableName, String whereClause); + + public Data createData(ISqlTransaction transaction, String catalogName, String schemaName, String tableName, String whereClause); + + public Data mapData(Row row); + + public List listDataIds(long batchId, String nodeId); + + public List listData(long batchId, String nodeId, long startDataId, String channelId, int maxRowsToRetrieve); + + public void updateDataGap(DataGap gap, DataGap.Status status); + + public void insertDataGap(DataGap gap); + + public void insertDataGap(ISqlTransaction transaction, DataGap gap); + + public void deleteDataGap(ISqlTransaction transaction, DataGap gap); + + public void deleteAllDataGaps(ISqlTransaction transaction); + + public void deleteDataGap(DataGap gap); + + public void deleteCapturedConfigChannelData(); + + public long findMaxDataId(); + + public Data findData(long dataId); + + public long findMinDataId(); + + public ISqlReadCursor selectDataFor(Batch batch); + + public ISqlReadCursor selectDataFor(Long batchId, String channelId); + + public Map getLastDataCaptureByChannel(); + } \ No newline at end of file diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/IIncomingBatchService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/IIncomingBatchService.java index 484097d2ab..24667dd444 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/IIncomingBatchService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/IIncomingBatchService.java @@ -1,60 +1,60 @@ -/** - * Licensed to JumpMind Inc under one or more contributor - * license agreements. 
See the NOTICE file distributed
- * with this work for additional information regarding
- * copyright ownership. JumpMind Inc licenses this file
- * to you under the GNU General Public License, version 3.0 (GPLv3)
- * (the "License"); you may not use this file except in compliance
- * with the License.
- *
- * You should have received a copy of the GNU General Public License,
- * version 3.0 (GPLv3) along with this library; if not, see
- * <http://www.gnu.org/licenses/>.
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.jumpmind.symmetric.service;
-
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-
-import org.jumpmind.db.sql.ISqlTransaction;
-import org.jumpmind.symmetric.model.BatchId;
-import org.jumpmind.symmetric.model.IncomingBatch;
-import org.jumpmind.symmetric.model.IncomingBatchSummary;
-import org.jumpmind.symmetric.model.IncomingBatch.Status;
-
+/**
+ * Licensed to JumpMind Inc under one or more contributor
+ * license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding
+ * copyright ownership. JumpMind Inc licenses this file
+ * to you under the GNU General Public License, version 3.0 (GPLv3)
+ * (the "License"); you may not use this file except in compliance
+ * with the License.
+ *
+ * You should have received a copy of the GNU General Public License,
+ * version 3.0 (GPLv3) along with this library; if not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.jumpmind.symmetric.service;
+
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+import org.jumpmind.db.sql.ISqlTransaction;
+import org.jumpmind.symmetric.model.BatchId;
+import org.jumpmind.symmetric.model.IncomingBatch;
+import org.jumpmind.symmetric.model.IncomingBatchSummary;
+import org.jumpmind.symmetric.model.AbstractBatch.Status;
+ 
 /**
  * This service provides an API to access to the incoming batch table.
*/ -public interface IIncomingBatchService { - - public List getNodesInError(); +public interface IIncomingBatchService { + + public List getNodesInError(); public int countIncomingBatchesInError(); public int countIncomingBatchesInError(String channelId); - - public IncomingBatch findIncomingBatch(long batchId, String nodeId); - - public void refreshIncomingBatch(IncomingBatch batch); - - public List findIncomingBatchErrors(int maxRows); - - public boolean acquireIncomingBatch(IncomingBatch batch); - - public void insertIncomingBatch(ISqlTransaction transaction, IncomingBatch batch); - + + public IncomingBatch findIncomingBatch(long batchId, String nodeId); + + public void refreshIncomingBatch(IncomingBatch batch); + + public List findIncomingBatchErrors(int maxRows); + + public boolean acquireIncomingBatch(IncomingBatch batch); + + public void insertIncomingBatch(ISqlTransaction transaction, IncomingBatch batch); + public void insertIncomingBatch(IncomingBatch batch); - public int updateIncomingBatch(ISqlTransaction transaction, IncomingBatch batch); + public int updateIncomingBatch(ISqlTransaction transaction, IncomingBatch batch); public int updateIncomingBatch(IncomingBatch batch); @@ -64,23 +64,23 @@ public List listIncomingBatchTimes(List nodeIds, List chan List statuses, List loads, boolean ascending); public List listIncomingBatches(List nodeIds, List channels, - List statuses, List loads, Date startAtCreateTime, int maxRowsToRetrieve, boolean ascending); + List statuses, List loads, Date startAtCreateTime, int maxRowsToRetrieve, boolean ascending); - public void markIncomingBatchesOk(String nodeId); - + public void markIncomingBatchesOk(String nodeId); + public void removingIncomingBatches(String nodeId); - public List listIncomingBatchesInErrorFor(String nodeId); - + public List listIncomingBatchesInErrorFor(String nodeId); + public boolean isRecordOkBatchesEnabled(); - - public Map findMaxBatchIdsByChannel(); - - public List findIncomingBatchSummaryByChannel(Status... statuses); - - public List findIncomingBatchSummary(Status... statuses); - - public Map findLastUpdatedByChannel(); - - public List getAllBatches(); + + public Map findMaxBatchIdsByChannel(); + + public List findIncomingBatchSummaryByChannel(Status... statuses); + + public List findIncomingBatchSummary(Status... 
statuses); + + public Map findLastUpdatedByChannel(); + + public List getAllBatches(); } \ No newline at end of file diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/AcknowledgeService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/AcknowledgeService.java index 32cb36a1fd..6a3eddb12b 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/AcknowledgeService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/AcknowledgeService.java @@ -33,7 +33,7 @@ import org.jumpmind.symmetric.model.BatchAckResult; import org.jumpmind.symmetric.model.Channel; import org.jumpmind.symmetric.model.OutgoingBatch; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; import org.jumpmind.symmetric.service.IAcknowledgeService; import org.jumpmind.symmetric.service.IOutgoingBatchService; import org.jumpmind.symmetric.service.IRegistrationService; @@ -94,7 +94,7 @@ public BatchAckResult ack(final BatchAck batch) { } outgoingBatch.setNetworkMillis(batch.getNetworkMillis()); outgoingBatch.setFilterMillis(batch.getFilterMillis()); - outgoingBatch.setLoadMillis(batch.getDatabaseMillis()); + outgoingBatch.setLoadMillis(batch.getLoadMillis()); outgoingBatch.setLoadStartTime(new Date(batch.getStartTime())); outgoingBatch.setSqlCode(batch.getSqlCode()); outgoingBatch.setSqlState(batch.getSqlState()); diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataExtractorService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataExtractorService.java index 01b889cc6d..b2d9ed979a 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataExtractorService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataExtractorService.java @@ -111,7 +111,7 @@ import org.jumpmind.symmetric.model.NodeGroupLink; import org.jumpmind.symmetric.model.NodeGroupLinkAction; import org.jumpmind.symmetric.model.OutgoingBatch; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; import org.jumpmind.symmetric.model.OutgoingBatchWithPayload; import org.jumpmind.symmetric.model.OutgoingBatches; import org.jumpmind.symmetric.model.ProcessInfo; @@ -576,7 +576,7 @@ protected List extract(final ProcessInfo processInfo, final Node currentBatch = activeBatches.get(i); processInfo.setCurrentLoadId(currentBatch.getLoadId()); - processInfo.setDataCount(currentBatch.getDataEventCount()); + processInfo.setDataCount(currentBatch.getDataRowCount()); processInfo.setCurrentBatchId(currentBatch.getBatchId()); channelsProcessed.add(currentBatch.getChannelId()); @@ -806,7 +806,7 @@ final protected boolean changeBatchStatus(Status status, OutgoingBatch currentBa currentBatch.getLastUpdatedTime() == null || System.currentTimeMillis() - batchStatusUpdateMillis >= currentBatch.getLastUpdatedTime().getTime() || channel.isReloadFlag() || - currentBatch.getDataEventCount() > batchStatusUpdateDataCount) { + currentBatch.getDataRowCount() > batchStatusUpdateDataCount) { outgoingBatchService.updateOutgoingBatch(currentBatch); return true; } @@ -919,18 +919,18 @@ protected OutgoingBatch extractOutgoingBatch(ProcessInfo processInfo, Node targe } if (updateBatchStatistics) { - long dataEventCount = currentBatch.getDataEventCount(); - long insertEventCount = currentBatch.getInsertEventCount(); + long dataEventCount = currentBatch.getDataRowCount(); + long 
insertEventCount = currentBatch.getDataInsertRowCount(); currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch); // preserve in the case of a reload event - if (dataEventCount > currentBatch.getDataEventCount()) { - currentBatch.setDataEventCount(dataEventCount); + if (dataEventCount > currentBatch.getDataRowCount()) { + currentBatch.setDataRowCount(dataEventCount); } // preserve in the case of a reload event - if (insertEventCount > currentBatch.getInsertEventCount()) { - currentBatch.setInsertEventCount(insertEventCount); + if (insertEventCount > currentBatch.getDataInsertRowCount()) { + currentBatch.setDataInsertRowCount(insertEventCount); } // only update the current batch after we have possibly @@ -1186,7 +1186,7 @@ protected void transferFromStaging(ExtractMode mode, BatchType batchType, Outgoi } } - statisticManager.incrementDataSent(batch.getChannelId(), batch.getDataEventCount()); + statisticManager.incrementDataSent(batch.getChannelId(), batch.getDataRowCount()); statisticManager.incrementDataBytesSent(batch.getChannelId(), totalBytesRead); if (log.isDebugEnabled() && totalThrottleTime > 0) { @@ -1766,7 +1766,7 @@ public CsvData next() { || symmetricDialect.getName().equals( DatabaseNamesConstants.MSSQL2008)); - outgoingBatch.incrementDataEventCount(); + outgoingBatch.incrementDataRowCount(); } else { log.error( "Could not locate a trigger with the id of {} for {}. It was recorded in the hist table with a hist id of {}", @@ -1930,7 +1930,7 @@ public CsvData next() { .isNotBlank(triggerRouter.getInitialLoadSelect()), triggerRouter)); if (data != null && outgoingBatch != null && !outgoingBatch.isExtractJobFlag()) { - outgoingBatch.incrementDataEventCount(); + outgoingBatch.incrementDataRowCount(); outgoingBatch.incrementEventCount(data.getDataEventType()); } diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataLoaderService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataLoaderService.java index a183de0089..b89315297c 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataLoaderService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataLoaderService.java @@ -95,7 +95,7 @@ import org.jumpmind.symmetric.model.Channel; import org.jumpmind.symmetric.model.ChannelMap; import org.jumpmind.symmetric.model.IncomingBatch; -import org.jumpmind.symmetric.model.IncomingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; import org.jumpmind.symmetric.model.IncomingError; import org.jumpmind.symmetric.model.LoadFilter; import org.jumpmind.symmetric.model.LoadFilter.LoadFilterType; @@ -423,7 +423,7 @@ private void logDataReceivedFromPush(Node sourceNode, List batchL for (IncomingBatch incomingBatch : batchList) { if (incomingBatch.getStatus() == Status.OK) { okBatchesCount++; - okDataCount += incomingBatch.getStatementCount(); + okDataCount += incomingBatch.getLoadRowCount(); } else if (incomingBatch.getStatus() == Status.ER) { errorBatchesCount++; } @@ -1078,7 +1078,7 @@ public void batchSuccessful(DataContext context) { this.currentBatch.setValues(context.getReader().getStatistics().get(batch), context .getWriter().getStatistics().get(batch), true); statisticManager.incrementDataLoaded(this.currentBatch.getChannelId(), - this.currentBatch.getStatementCount()); + this.currentBatch.getLoadRowCount()); statisticManager.incrementDataBytesLoaded(this.currentBatch.getChannelId(), this.currentBatch.getByteCount()); Status oldStatus = 
this.currentBatch.getStatus(); @@ -1128,7 +1128,7 @@ public void batchInError(DataContext context, Throwable ex) { this.currentBatch.setValues(context.getReader().getStatistics().get(batch), context.getWriter().getStatistics().get(batch), false); statisticManager.incrementDataLoaded(this.currentBatch.getChannelId(), - this.currentBatch.getStatementCount()); + this.currentBatch.getLoadRowCount()); statisticManager.incrementDataBytesLoaded(this.currentBatch.getChannelId(), this.currentBatch.getByteCount()); statisticManager.incrementDataLoadedErrors(this.currentBatch.getChannelId(), 1); diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataService.java index 42b2176c84..459598e511 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/DataService.java @@ -74,7 +74,7 @@ import org.jumpmind.symmetric.model.NodeGroupLinkAction; import org.jumpmind.symmetric.model.NodeSecurity; import org.jumpmind.symmetric.model.OutgoingBatch; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; import org.jumpmind.symmetric.model.ProcessInfo; import org.jumpmind.symmetric.model.Router; import org.jumpmind.symmetric.model.TableReloadRequest; @@ -1353,7 +1353,7 @@ protected long insertDataEventAndOutgoingBatch(ISqlTransaction transaction, long outgoingBatch.setCreateBy(createBy); outgoingBatch.setLoadFlag(isLoad); outgoingBatch.incrementEventCount(eventType); - outgoingBatch.incrementDataEventCount(); + outgoingBatch.incrementDataRowCount(); if (tableName != null) { outgoingBatch.incrementTableCount(tableName.toLowerCase()); } diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/FileSyncService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/FileSyncService.java index 68badb9375..49febaf555 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/FileSyncService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/FileSyncService.java @@ -75,7 +75,7 @@ import org.jumpmind.symmetric.model.NodeCommunication.CommunicationType; import org.jumpmind.symmetric.model.NodeSecurity; import org.jumpmind.symmetric.model.OutgoingBatch; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; import org.jumpmind.symmetric.model.OutgoingBatches; import org.jumpmind.symmetric.model.ProcessInfo; import org.jumpmind.symmetric.model.ProcessInfoKey; @@ -914,7 +914,7 @@ protected List processZip(InputStream is, String sourceNodeId, updateFileIncoming(sourceNodeId, filesToEventType); } incomingBatch - .setStatementCount(filesToEventType != null ? filesToEventType + .setLoadRowCount(filesToEventType != null ? 
filesToEventType .size() : 0); } else { throw new RuntimeException( diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/IncomingBatchService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/IncomingBatchService.java index caab23855c..7aa75cb431 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/IncomingBatchService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/IncomingBatchService.java @@ -39,7 +39,8 @@ import org.jumpmind.symmetric.db.ISymmetricDialect; import org.jumpmind.symmetric.model.BatchId; import org.jumpmind.symmetric.model.IncomingBatch; -import org.jumpmind.symmetric.model.IncomingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch; import org.jumpmind.symmetric.model.IncomingBatchSummary; import org.jumpmind.symmetric.service.IClusterService; import org.jumpmind.symmetric.service.IIncomingBatchService; @@ -134,7 +135,7 @@ public boolean isRecordOkBatchesEnabled() { } public List listIncomingBatchTimes(List nodeIds, List channels, - List statuses, List loads, boolean ascending) { + List statuses, List loads, boolean ascending) { String whereClause = buildBatchWhere(nodeIds, channels, statuses, loads); @@ -150,7 +151,7 @@ public List listIncomingBatchTimes(List nodeIds, List chan } public List listIncomingBatches(List nodeIds, List channels, - List statuses, List loads, Date startAtCreateTime, + List statuses, List loads, Date startAtCreateTime, final int maxRowsToRetrieve, boolean ascending) { Map params = new HashMap(); params.put("NODES", nodeIds); @@ -228,11 +229,11 @@ public boolean acquireIncomingBatch(IncomingBatch batch) { okayToProcess = false; batch.setStatus(existingBatch.getStatus()); batch.setByteCount(existingBatch.getByteCount()); - batch.setDatabaseMillis(existingBatch.getDatabaseMillis()); + batch.setLoadMillis(existingBatch.getLoadMillis()); batch.setNetworkMillis(existingBatch.getNetworkMillis()); batch.setFilterMillis(existingBatch.getFilterMillis()); batch.setSkipCount(existingBatch.getSkipCount() + 1); - batch.setStatementCount(existingBatch.getStatementCount()); + batch.setLoadRowCount(existingBatch.getLoadRowCount()); existingBatch.setSkipCount(existingBatch.getSkipCount() + 1); log.info("Skipping batch {}", batch.getNodeBatchId()); @@ -258,9 +259,9 @@ public void insertIncomingBatch(ISqlTransaction transaction, IncomingBatch batch getSql("insertIncomingBatchSql"), new Object[] { batch.getBatchId(), batch.getNodeId(), batch.getChannelId(), batch.getStatus().name(), batch.getNetworkMillis(), - batch.getFilterMillis(), batch.getDatabaseMillis(), + batch.getFilterMillis(), batch.getLoadMillis(), batch.getFailedRowNumber(), batch.getFailedLineNumber(), - batch.getByteCount(), batch.getStatementCount(), + batch.getByteCount(), batch.getLoadRowCount(), batch.getFallbackInsertCount(), batch.getFallbackUpdateCount(), batch.getIgnoreCount(), batch.getIgnoreRowCount(), batch.getMissingDeleteCount(), batch.getSkipCount(), batch.getSqlState(), batch.getSqlCode(), @@ -338,9 +339,9 @@ public int updateIncomingBatch(ISqlTransaction transaction , IncomingBatch batch getSql("updateIncomingBatchSql"), new Object[] { batch.getStatus().name(), batch.isErrorFlag() ? 
1 : 0, batch.getNetworkMillis(), batch.getFilterMillis(), - batch.getDatabaseMillis(), batch.getFailedRowNumber(), + batch.getLoadMillis(), batch.getFailedRowNumber(), batch.getFailedLineNumber(), batch.getByteCount(), - batch.getStatementCount(), batch.getFallbackInsertCount(), + batch.getLoadRowCount(), batch.getFallbackInsertCount(), batch.getFallbackUpdateCount(), batch.getIgnoreCount(), batch.getIgnoreRowCount(), batch.getMissingDeleteCount(), batch.getSkipCount(), batch.getSqlState(), batch.getSqlCode(), @@ -486,13 +487,34 @@ public IncomingBatch mapRow(Row rs) { batch.setNodeId(rs.getString("node_id")); batch.setChannelId(rs.getString("channel_id")); batch.setStatus(rs.getString("status")); + batch.setRouterMillis(rs.getLong("router_millis")); batch.setNetworkMillis(rs.getLong("network_millis")); batch.setFilterMillis(rs.getLong("filter_millis")); - batch.setDatabaseMillis(rs.getLong("database_millis")); + batch.setLoadMillis(rs.getLong("load_millis")); + batch.setExtractMillis(rs.getLong("extract_millis")); + batch.setTransformExtractMillis(rs.getLong("transform_extract_millis")); + batch.setTransformLoadMillis(rs.getLong("transform_load_millis")); batch.setFailedRowNumber(rs.getLong("failed_row_number")); batch.setFailedLineNumber(rs.getLong("failed_line_number")); batch.setByteCount(rs.getLong("byte_count")); - batch.setStatementCount(rs.getLong("statement_count")); + batch.setLoadFlag(rs.getBoolean("load_flag")); + batch.setExtractCount(rs.getLong("extract_count")); + batch.setSentCount(rs.getLong("sent_count")); + batch.setLoadCount(rs.getLong("load_count")); + batch.setDataRowCount(rs.getLong("data_row_count")); + batch.setLoadRowCount(rs.getLong("load_row_count")); + batch.setExtractRowCount(rs.getLong("extract_row_count")); + batch.setReloadRowCount(rs.getLong("reload_row_count")); + batch.setDataInsertRowCount(rs.getLong("data_insert_row_count")); + batch.setDataUpdateRowCount(rs.getLong("data_update_row_count")); + batch.setDataDeleteRowCount(rs.getLong("data_delete_row_count")); + batch.setLoadInsertRowCount(rs.getLong("load_insert_row_count")); + batch.setLoadUpdateRowCount(rs.getLong("load_update_row_count")); + batch.setLoadDeleteRowCount(rs.getLong("load_delete_row_count")); + batch.setExtractInsertRowCount(rs.getLong("extract_insert_row_count")); + batch.setExtractUpdateRowCount(rs.getLong("extract_update_row_count")); + batch.setExtractDeleteRowCount(rs.getLong("extract_delete_row_count")); + batch.setOtherRowCount(rs.getLong("other_row_count")); batch.setFallbackInsertCount(rs.getLong("fallback_insert_count")); batch.setFallbackUpdateCount(rs.getLong("fallback_update_count")); batch.setIgnoreCount(rs.getLong("ignore_count")); @@ -507,6 +529,9 @@ public IncomingBatch mapRow(Row rs) { batch.setCreateTime(rs.getDateTime("create_time")); batch.setErrorFlag(rs.getBoolean("error_flag")); batch.setSummary(rs.getString("summary")); + batch.setLoadId(rs.getLong("load_id")); + batch.setCommonFlag(rs.getBoolean("common_flag")); + batch.setFailedDataId(rs.getLong("failed_data_id")); return batch; } } diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/IncomingBatchServiceSqlMap.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/IncomingBatchServiceSqlMap.java index f604895189..6da4054f2b 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/IncomingBatchServiceSqlMap.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/IncomingBatchServiceSqlMap.java @@ -33,10 +33,18 @@ public 
IncomingBatchServiceSqlMap(IDatabasePlatform platform, Map= maxBatchSize && this.batches.size() > 0) { + if (this.outgoingBatch.getDataRowCount() >= maxBatchSize && this.batches.size() > 0) { this.currentDataWriter.end(table); this.currentDataWriter.end(batch, false); this.closeCurrentDataWriter(); @@ -200,12 +200,12 @@ public void end(Batch batch, boolean inError) { protected void nextBatch() { if (this.outgoingBatch != null) { this.finishedBatches.add(outgoingBatch); - rowCount += this.outgoingBatch.getDataEventCount(); + rowCount += this.outgoingBatch.getDataRowCount(); byteCount += this.outgoingBatch.getByteCount(); } this.outgoingBatch = this.batches.remove(0); - this.outgoingBatch.setDataEventCount(0); - this.outgoingBatch.setInsertEventCount(0); + this.outgoingBatch.setDataRowCount(0); + this.outgoingBatch.setDataInsertRowCount(0); if (this.finishedBatches.size() > 0) { this.outgoingBatch.setExtractCount(this.outgoingBatch.getExtractCount() + 1); } diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/OutgoingBatchService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/OutgoingBatchService.java index 6562ea2b98..40100e2b17 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/OutgoingBatchService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/OutgoingBatchService.java @@ -53,7 +53,7 @@ import org.jumpmind.symmetric.model.NodeHost; import org.jumpmind.symmetric.model.NodeSecurity; import org.jumpmind.symmetric.model.OutgoingBatch; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; import org.jumpmind.symmetric.model.OutgoingBatchSummary; import org.jumpmind.symmetric.model.OutgoingBatches; import org.jumpmind.symmetric.model.OutgoingLoadSummary; @@ -216,9 +216,9 @@ public void updateOutgoingBatch(ISqlTransaction transaction, OutgoingBatch outgo outgoingBatch.isLoadFlag() ? 1 : 0, outgoingBatch.isErrorFlag() ? 1 : 0, outgoingBatch.getByteCount(), outgoingBatch.getExtractCount(), outgoingBatch.getSentCount(), outgoingBatch.getLoadCount(), - outgoingBatch.getDataEventCount(), outgoingBatch.getReloadEventCount(), - outgoingBatch.getInsertEventCount(), outgoingBatch.getUpdateEventCount(), - outgoingBatch.getDeleteEventCount(), outgoingBatch.getOtherEventCount(), + outgoingBatch.getDataRowCount(), outgoingBatch.getReloadRowCount(), + outgoingBatch.getDataInsertRowCount(), outgoingBatch.getDataUpdateRowCount(), + outgoingBatch.getDataDeleteRowCount(), outgoingBatch.getOtherRowCount(), outgoingBatch.getIgnoreCount(), outgoingBatch.getRouterMillis(), outgoingBatch.getNetworkMillis(), outgoingBatch.getFilterMillis(), outgoingBatch.getLoadMillis(), outgoingBatch.getExtractMillis(), @@ -269,9 +269,9 @@ public void insertOutgoingBatch(ISqlTransaction transaction, OutgoingBatch outgo transaction.prepareAndExecute(getSql("insertOutgoingBatchSql"), batchId, outgoingBatch .getNodeId(), outgoingBatch.getChannelId(), outgoingBatch.getStatus().name(), outgoingBatch.getLoadId(), outgoingBatch.isExtractJobFlag() ? 1: 0, outgoingBatch.isLoadFlag() ? 1 : 0, outgoingBatch - .isCommonFlag() ? 1 : 0, outgoingBatch.getReloadEventCount(), outgoingBatch - .getOtherEventCount(), outgoingBatch.getUpdateEventCount(), outgoingBatch.getInsertEventCount(), - outgoingBatch.getDeleteEventCount(), outgoingBatch.getLastUpdatedHostName(), + .isCommonFlag() ? 
1 : 0, outgoingBatch.getReloadRowCount(), outgoingBatch + .getOtherRowCount(), outgoingBatch.getDataUpdateRowCount(), outgoingBatch.getDataInsertRowCount(), + outgoingBatch.getDataDeleteRowCount(), outgoingBatch.getLastUpdatedHostName(), outgoingBatch.getCreateBy(), outgoingBatch.getSummary()); outgoingBatch.setBatchId(batchId); } @@ -1085,18 +1085,28 @@ public OutgoingBatch mapRow(Row rs) { batch.setExtractCount(rs.getLong("extract_count")); batch.setSentCount(rs.getLong("sent_count")); batch.setLoadCount(rs.getLong("load_count")); - batch.setDataEventCount(rs.getLong("data_event_count")); - batch.setReloadEventCount(rs.getLong("reload_event_count")); - batch.setInsertEventCount(rs.getLong("insert_event_count")); - batch.setUpdateEventCount(rs.getLong("update_event_count")); - batch.setDeleteEventCount(rs.getLong("delete_event_count")); - batch.setOtherEventCount(rs.getLong("other_event_count")); + batch.setDataRowCount(rs.getLong("data_row_count")); + batch.setLoadRowCount(rs.getLong("load_row_count")); + batch.setExtractRowCount(rs.getLong("extract_row_count")); + batch.setReloadRowCount(rs.getLong("reload_row_count")); + batch.setDataInsertRowCount(rs.getLong("data_insert_row_count")); + batch.setDataUpdateRowCount(rs.getLong("data_update_row_count")); + batch.setDataDeleteRowCount(rs.getLong("data_delete_row_count")); + batch.setLoadInsertRowCount(rs.getLong("load_insert_row_count")); + batch.setLoadUpdateRowCount(rs.getLong("load_update_row_count")); + batch.setLoadDeleteRowCount(rs.getLong("load_delete_row_count")); + batch.setExtractInsertRowCount(rs.getLong("extract_insert_row_count")); + batch.setExtractUpdateRowCount(rs.getLong("extract_update_row_count")); + batch.setExtractDeleteRowCount(rs.getLong("extract_delete_row_count")); + batch.setOtherRowCount(rs.getLong("other_row_count")); batch.setIgnoreCount(rs.getLong("ignore_count")); batch.setRouterMillis(rs.getLong("router_millis")); batch.setNetworkMillis(rs.getLong("network_millis")); batch.setFilterMillis(rs.getLong("filter_millis")); batch.setLoadMillis(rs.getLong("load_millis")); batch.setExtractMillis(rs.getLong("extract_millis")); + batch.setTransformExtractMillis(rs.getLong("transform_extract_millis")); + batch.setTransformLoadMillis(rs.getLong("transform_load_millis")); batch.setExtractStartTime(rs.getDateTime("extract_start_time")); batch.setTransferStartTime(rs.getDateTime("transfer_start_time")); batch.setLoadStartTime(rs.getDateTime("load_start_time")); @@ -1114,6 +1124,12 @@ public OutgoingBatch mapRow(Row rs) { batch.setLoadId(rs.getLong("load_id")); batch.setCreateBy(rs.getString("create_by")); batch.setSummary(rs.getString("summary")); + batch.setFallbackInsertCount(rs.getLong("fallback_insert_count")); + batch.setFallbackUpdateCount(rs.getLong("fallback_update_count")); + batch.setIgnoreRowCount(rs.getLong("ignore_row_count")); + batch.setMissingDeleteCount(rs.getLong("missing_delete_count")); + batch.setSkipCount(rs.getLong("skip_count")); + } return batch; } else { diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/OutgoingBatchServiceSqlMap.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/OutgoingBatchServiceSqlMap.java index 243fb80a89..6bd8e86dbd 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/OutgoingBatchServiceSqlMap.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/OutgoingBatchServiceSqlMap.java @@ -50,14 +50,14 @@ public OutgoingBatchServiceSqlMap(IDatabasePlatform platform, 
putSql("insertOutgoingBatchSql", "insert into $(outgoing_batch) " - + " (batch_id, node_id, channel_id, status, load_id, extract_job_flag, load_flag, common_flag, reload_event_count, other_event_count, " - + " update_event_count, insert_event_count, delete_event_count, last_update_hostname, last_update_time, create_time, create_by, summary) " + + " (batch_id, node_id, channel_id, status, load_id, extract_job_flag, load_flag, common_flag, reload_row_count, other_row_count, " + + " data_update_row_count, data_insert_row_count, data_delete_row_count, last_update_hostname, last_update_time, create_time, create_by, summary) " + " values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, current_timestamp, current_timestamp, ?, ?) "); putSql("updateOutgoingBatchSql", "update $(outgoing_batch) set status=?, load_id=?, extract_job_flag=?, load_flag=?, error_flag=?, " - + " byte_count=?, extract_count=?, sent_count=?, load_count=?, data_event_count=?, " - + " reload_event_count=?, insert_event_count=?, update_event_count=?, delete_event_count=?, other_event_count=?, " + + " byte_count=?, extract_count=?, sent_count=?, load_count=?, data_row_count=?, " + + " reload_row_count=?, data_insert_row_count=?, data_update_row_count=?, data_delete_row_count=?, other_row_count=?, " + " ignore_count=?, router_millis=?, network_millis=?, filter_millis=?, " + " load_millis=?, extract_millis=?, extract_start_time=?, transfer_start_time=?, load_start_time=?, " + " sql_state=?, sql_code=?, sql_message=?, " @@ -94,12 +94,16 @@ public OutgoingBatchServiceSqlMap(IDatabasePlatform platform, putSql("selectOutgoingBatchPrefixSql", "select b.node_id, b.channel_id, b.status, " - + " b.byte_count, b.extract_count, b.sent_count, b.load_count, b.data_event_count, " - + " b.reload_event_count, b.insert_event_count, b.update_event_count, b.delete_event_count, b.other_event_count, " + + " b.byte_count, b.extract_count, b.sent_count, b.load_count, b.data_row_count, " + + " b.reload_row_count, b.data_insert_row_count, b.data_update_row_count, b.data_delete_row_count, b.other_row_count, " + " b.ignore_count, b.router_millis, b.network_millis, b.filter_millis, b.load_millis, b.extract_millis, " + " b.extract_start_time, b.transfer_start_time, b.load_start_time, b.sql_state, b.sql_code, " - + " b.sql_message, " - + " b.failed_data_id, b.last_update_hostname, b.last_update_time, b.create_time, b.batch_id, b.extract_job_flag, b.load_flag, b.error_flag, b.common_flag, b.load_id, b.create_by, b.summary from " + + " b.sql_message, b.load_insert_row_count, b.load_update_row_count, b.load_delete_row_count, b.load_row_count, " + + " b.extract_insert_row_count, b.extract_update_row_count, b.extract_delete_row_count, b.extract_row_count, " + + " b.transform_extract_millis, b.transform_load_millis, b.fallback_insert_count, b.fallback_update_count, " + + " b.ignore_row_count, b.missing_delete_count, b.skip_count, " + + " b.failed_data_id, b.last_update_hostname, b.last_update_time, b.create_time, b.batch_id, b.extract_job_flag, " + + " b.load_flag, b.error_flag, b.common_flag, b.load_id, b.create_by, b.summary from " + " $(outgoing_batch) b "); putSql("selectOutgoingBatchErrorsSql", " where error_flag=1 order by batch_id "); @@ -123,13 +127,13 @@ public OutgoingBatchServiceSqlMap(IDatabasePlatform platform, "select count(*) from $(outgoing_batch) where status != 'OK' and channel_id=?"); putSql("selectOutgoingBatchSummaryByStatusSql", - "select count(*) as batches, sum(data_event_count) as data, status, node_id, min(create_time) as oldest_batch_time, " + 
"select count(*) as batches, sum(data_row_count) as data, status, node_id, min(create_time) as oldest_batch_time, " + " max(last_update_time) as last_update_time, sum(byte_count) as total_bytes, " + " sum(router_millis + extract_millis + network_millis + filter_millis + load_millis) as total_millis " + " from $(outgoing_batch) where status in (:STATUS_LIST) group by status, node_id order by oldest_batch_time asc "); putSql("selectOutgoingBatchSummaryByStatusAndChannelSql", - "select count(*) as batches, sum(s.data_event_count) as data, s.status, s.node_id, min(s.create_time) as oldest_batch_time, s.channel_id, " + "select count(*) as batches, sum(s.data_row_count) as data, s.status, s.node_id, min(s.create_time) as oldest_batch_time, s.channel_id, " + " max(s.last_update_time) as last_update_time, b.sql_message as sql_message, min(s.batch_id) as batch_id, " + " sum(s.byte_count) as total_bytes, sum(s.router_millis + s.extract_millis + s.network_millis + s.filter_millis + s.load_millis) as total_millis, " + " sum(s.router_millis) as total_router_millis, sum(s.extract_millis) as total_extract_millis, " @@ -144,7 +148,7 @@ public OutgoingBatchServiceSqlMap(IDatabasePlatform platform, putSql("getLoadSummariesSql", "select b.load_id, b.node_id, b.status, b.create_by, max(error_flag) as error_flag, count(*) as cnt, min(b.create_time) as create_time, " + " max(b.last_update_time) as last_update_time, min(b.batch_id) as current_batch_id, " - + " min(b.data_event_count) as current_data_event_count, b.channel_id " + + " min(b.data_row_count) as current_data_event_count, b.channel_id " + "from $(outgoing_batch) b inner join " + " $(data_event) e on b.batch_id=e.batch_id inner join " + " $(data) d on d.data_id=e.data_id " @@ -204,7 +208,7 @@ public OutgoingBatchServiceSqlMap(IDatabasePlatform platform, putSql("getLoadStatusSummarySql", "select ob.load_id, count(ob.batch_id) as count, ob.status, c.queue, max(ob.last_update_time) as last_update_time, " - + " min(ob.create_time) as create_time, sum(ob.data_event_count) as data_events, sum(ob.byte_count) as byte_count, " + + " min(ob.create_time) as create_time, sum(ob.data_row_count) as data_events, sum(ob.byte_count) as byte_count, " + " min(extract_start_time) as min_extract_start_time, min(transfer_start_time) as min_transfer_start_time, " + " min(load_start_time) as min_load_start_time, " + " min(summary) as min_summary, max(summary) as max_summary, " @@ -228,9 +232,9 @@ public OutgoingBatchServiceSqlMap(IDatabasePlatform platform, putSql("copyOutgoingBatchesSql", "insert into $(outgoing_batch) " - + " (batch_id, node_id, channel_id, status, load_id, extract_job_flag, load_flag, common_flag, reload_event_count, other_event_count, " + + " (batch_id, node_id, channel_id, status, load_id, extract_job_flag, load_flag, common_flag, reload_row_count, other_row_count, " + " last_update_hostname, last_update_time, create_time, create_by) " - + " (select batch_id, ?, channel_id, 'NE', load_id, extract_job_flag, load_flag, common_flag, reload_event_count, other_event_count, " + + " (select batch_id, ?, channel_id, 'NE', load_id, extract_job_flag, load_flag, common_flag, reload_row_count, other_row_count, " + " last_update_hostname, current_timestamp, create_time, 'copy' from $(outgoing_batch) where node_id=? and channel_id=? and batch_id > ?) 
"); @@ -248,7 +252,7 @@ public OutgoingBatchServiceSqlMap(IDatabasePlatform platform, + " select node_id, channel_id, direction, total_rows, average_create_time, " + " average_last_update_time from ( " + " select node_id, channel_id, 'incoming' as direction, " - + " sum(statement_count) as total_rows, avg(create_time) as average_create_time, " + + " sum(load_row_count) as total_rows, avg(create_time) as average_create_time, " + " avg(last_update_time) as average_last_update_time " + " from sym_incoming_batch where status = 'OK' " + " group by node_id, channel_id order by node_id " @@ -266,7 +270,7 @@ public OutgoingBatchServiceSqlMap(IDatabasePlatform platform, + " select node_id, channel_id, direction, total_rows, average_create_time, " + " average_last_update_time from ( " + " select node_id, channel_id, 'incoming' as direction, " - + " sum(statement_count) as total_rows, avg(datediff('ms', '1970-01-01', create_time)) as average_create_time, " + + " sum(load_row_count) as total_rows, avg(datediff('ms', '1970-01-01', create_time)) as average_create_time, " + " avg(datediff('ms', '1970-01-01', last_update_time)) as average_last_update_time " + " from sym_incoming_batch where status = 'OK' " + " group by node_id, channel_id order by node_id " diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/RouterService.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/RouterService.java index a77a890778..b99fabdf6e 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/RouterService.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/service/impl/RouterService.java @@ -57,7 +57,7 @@ import org.jumpmind.symmetric.model.NodeGroupLinkAction; import org.jumpmind.symmetric.model.NodeSecurity; import org.jumpmind.symmetric.model.OutgoingBatch; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; import org.jumpmind.symmetric.model.ProcessInfo; import org.jumpmind.symmetric.model.ProcessInfoKey; import org.jumpmind.symmetric.model.ProcessInfoKey.ProcessType; @@ -1034,7 +1034,7 @@ protected int insertDataEvents(ProcessInfo processInfo, ChannelRouterContext con } batch.incrementEventCount(dataMetaData.getData().getDataEventType()); - batch.incrementDataEventCount(); + batch.incrementDataRowCount(); batch.incrementTableCount(dataMetaData.getTable().getNameLowerCase()); if (!context.isProduceCommonBatches() diff --git a/symmetric-core/src/main/java/org/jumpmind/symmetric/transport/AbstractTransportManager.java b/symmetric-core/src/main/java/org/jumpmind/symmetric/transport/AbstractTransportManager.java index cfe33f396b..d06c55e3c4 100644 --- a/symmetric-core/src/main/java/org/jumpmind/symmetric/transport/AbstractTransportManager.java +++ b/symmetric-core/src/main/java/org/jumpmind/symmetric/transport/AbstractTransportManager.java @@ -1,85 +1,85 @@ -/** - * Licensed to JumpMind Inc under one or more contributor - * license agreements. See the NOTICE file distributed - * with this work for additional information regarding - * copyright ownership. JumpMind Inc licenses this file - * to you under the GNU General Public License, version 3.0 (GPLv3) - * (the "License"); you may not use this file except in compliance - * with the License. - * - * You should have received a copy of the GNU General Public License, - * version 3.0 (GPLv3) along with this library; if not, see - * . 
- * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.jumpmind.symmetric.transport; - -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URLDecoder; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.math.NumberUtils; -import org.jumpmind.exception.IoException; -import org.jumpmind.symmetric.common.Constants; -import org.jumpmind.symmetric.io.IoConstants; -import org.jumpmind.symmetric.model.BatchAck; -import org.jumpmind.symmetric.model.IncomingBatch; -import org.jumpmind.symmetric.model.IncomingBatch.Status; -import org.jumpmind.symmetric.service.IExtensionService; -import org.jumpmind.symmetric.web.WebConstants; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -abstract public class AbstractTransportManager { - - protected final Logger log = LoggerFactory.getLogger(getClass()); - - protected IExtensionService extensionService; - - public AbstractTransportManager() { - } - - public AbstractTransportManager(IExtensionService extensionService) { - this.extensionService = extensionService; +/** + * Licensed to JumpMind Inc under one or more contributor + * license agreements. See the NOTICE file distributed + * with this work for additional information regarding + * copyright ownership. JumpMind Inc licenses this file + * to you under the GNU General Public License, version 3.0 (GPLv3) + * (the "License"); you may not use this file except in compliance + * with the License. + * + * You should have received a copy of the GNU General Public License, + * version 3.0 (GPLv3) along with this library; if not, see + * . + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.jumpmind.symmetric.transport; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URLDecoder; +import java.net.URLEncoder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.math.NumberUtils; +import org.jumpmind.exception.IoException; +import org.jumpmind.symmetric.common.Constants; +import org.jumpmind.symmetric.io.IoConstants; +import org.jumpmind.symmetric.model.BatchAck; +import org.jumpmind.symmetric.model.IncomingBatch; +import org.jumpmind.symmetric.model.AbstractBatch.Status; +import org.jumpmind.symmetric.service.IExtensionService; +import org.jumpmind.symmetric.web.WebConstants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +abstract public class AbstractTransportManager { + + protected final Logger log = LoggerFactory.getLogger(getClass()); + + protected IExtensionService extensionService; + + public AbstractTransportManager() { + } + + public AbstractTransportManager(IExtensionService extensionService) { + this.extensionService = extensionService; + } + + /** + * Build the url for remote node communication. Use the remote sync_url + * first, if it is null or blank, then use the registration url instead. + */ + public String resolveURL(String syncUrl, String registrationUrl) { + if (StringUtils.isBlank(syncUrl) || syncUrl.startsWith(Constants.PROTOCOL_NONE)) { + log.debug("Using the registration URL to contact the remote node because the syncURL for the node is blank"); + return registrationUrl; + } + + try { + URI uri = new URI(syncUrl); + + for (ISyncUrlExtension handler : extensionService.getExtensionPointList(ISyncUrlExtension.class)) { + syncUrl = handler.resolveUrl(uri); + uri = new URI(syncUrl); + } + } catch (URISyntaxException e) { + log.error(e.getMessage(),e); + } + return syncUrl; } - - /** - * Build the url for remote node communication. Use the remote sync_url - * first, if it is null or blank, then use the registration url instead. 
- */ - public String resolveURL(String syncUrl, String registrationUrl) { - if (StringUtils.isBlank(syncUrl) || syncUrl.startsWith(Constants.PROTOCOL_NONE)) { - log.debug("Using the registration URL to contact the remote node because the syncURL for the node is blank"); - return registrationUrl; - } - - try { - URI uri = new URI(syncUrl); - - for (ISyncUrlExtension handler : extensionService.getExtensionPointList(ISyncUrlExtension.class)) { - syncUrl = handler.resolveUrl(uri); - uri = new URI(syncUrl); - } - } catch (URISyntaxException e) { - log.error(e.getMessage(),e); - } - return syncUrl; - } - + protected String getAcknowledgementData(boolean requires13Format, String nodeId, List list) throws IOException { StringBuilder builder = new StringBuilder(); @@ -88,8 +88,8 @@ protected String getAcknowledgementData(boolean requires13Format, String nodeId, long batchId = batch.getBatchId(); Object value = null; if (batch.getStatus() == Status.OK) { - value = WebConstants.ACK_BATCH_OK; - } else if (batch.getStatus() == Status.RS) { + value = WebConstants.ACK_BATCH_OK; + } else if (batch.getStatus() == Status.RS) { value = WebConstants.ACK_BATCH_RESEND; } else { value = batch.getFailedRowNumber(); @@ -99,9 +99,9 @@ protected String getAcknowledgementData(boolean requires13Format, String nodeId, append(builder, WebConstants.ACK_NETWORK_MILLIS + batchId, batch.getNetworkMillis()); append(builder, WebConstants.ACK_FILTER_MILLIS + batchId, batch.getFilterMillis()); append(builder, WebConstants.ACK_DATABASE_MILLIS + batchId, - batch.getDatabaseMillis()); - append(builder, WebConstants.ACK_START_TIME + batchId, - batch.getStartTime()); + batch.getLoadMillis()); + append(builder, WebConstants.ACK_START_TIME + batchId, + batch.getStartTime()); append(builder, WebConstants.ACK_BYTE_COUNT + batchId, batch.getByteCount()); if (batch.getIgnoreCount() > 0) { @@ -126,8 +126,8 @@ protected String getAcknowledgementData(boolean requires13Format, String nodeId, } } return builder.toString(); - } - + } + protected static void append(StringBuilder builder, String name, Object value) { try { int len = builder.length(); @@ -142,88 +142,88 @@ protected static void append(StringBuilder builder, String name, Object value) { } catch (IOException ex) { throw new IoException(ex); } - } - - public List readAcknowledgement(String parameterString1, String parameterString2) throws IOException { - return readAcknowledgement(parameterString1 + "&" + parameterString2); - } - - public List readAcknowledgement(String parameterString) throws IOException { - Map parameters = getParametersFromQueryUrl(parameterString.replace("\n", "")); - return readAcknowledgement(parameters); - } - - public static List readAcknowledgement(Map parameters) { - List batches = new ArrayList(); - for (String parameterName : parameters.keySet()) { - if (parameterName.startsWith(WebConstants.ACK_BATCH_NAME)) { - long batchId = NumberUtils.toLong(parameterName.substring(WebConstants.ACK_BATCH_NAME.length())); - BatchAck batchInfo = getBatchInfo(parameters, batchId); - batches.add(batchInfo); - } - } - return batches; - } - - private static BatchAck getBatchInfo(Map parameters, long batchId) { + } + + public List readAcknowledgement(String parameterString1, String parameterString2) throws IOException { + return readAcknowledgement(parameterString1 + "&" + parameterString2); + } + + public List readAcknowledgement(String parameterString) throws IOException { + Map parameters = getParametersFromQueryUrl(parameterString.replace("\n", "")); + return 
readAcknowledgement(parameters); + } + + public static List readAcknowledgement(Map parameters) { + List batches = new ArrayList(); + for (String parameterName : parameters.keySet()) { + if (parameterName.startsWith(WebConstants.ACK_BATCH_NAME)) { + long batchId = NumberUtils.toLong(parameterName.substring(WebConstants.ACK_BATCH_NAME.length())); + BatchAck batchInfo = getBatchInfo(parameters, batchId); + batches.add(batchInfo); + } + } + return batches; + } + + private static BatchAck getBatchInfo(Map parameters, long batchId) { BatchAck batchInfo = new BatchAck(batchId); String nodeId = getParam(parameters, WebConstants.ACK_NODE_ID + batchId); if (StringUtils.isBlank(nodeId)) { nodeId = getParam(parameters, WebConstants.NODE_ID); - } - batchInfo.setNodeId(nodeId); - batchInfo.setNetworkMillis(getParamAsNum(parameters, WebConstants.ACK_NETWORK_MILLIS + batchId)); - batchInfo.setFilterMillis(getParamAsNum(parameters, WebConstants.ACK_FILTER_MILLIS + batchId)); - batchInfo.setDatabaseMillis(getParamAsNum(parameters, WebConstants.ACK_DATABASE_MILLIS + batchId)); + } + batchInfo.setNodeId(nodeId); + batchInfo.setNetworkMillis(getParamAsNum(parameters, WebConstants.ACK_NETWORK_MILLIS + batchId)); + batchInfo.setFilterMillis(getParamAsNum(parameters, WebConstants.ACK_FILTER_MILLIS + batchId)); + batchInfo.setLoadMillis(getParamAsNum(parameters, WebConstants.ACK_DATABASE_MILLIS + batchId)); batchInfo.setByteCount(getParamAsNum(parameters, WebConstants.ACK_BYTE_COUNT + batchId)); - batchInfo.setIgnored(getParamAsBoolean(parameters, WebConstants.ACK_IGNORE_COUNT + batchId)); - String status = getParam(parameters, WebConstants.ACK_BATCH_NAME + batchId, "").trim(); - batchInfo.setOk(status.equalsIgnoreCase(WebConstants.ACK_BATCH_OK)); - batchInfo.setResend(status.equalsIgnoreCase(WebConstants.ACK_BATCH_RESEND)); - batchInfo.setStartTime(getParamAsNum(parameters, WebConstants.ACK_START_TIME + batchId)); - if (!batchInfo.isOk()) { - batchInfo.setErrorLine(NumberUtils.toLong(status)); - batchInfo.setSqlState(getParam(parameters, WebConstants.ACK_SQL_STATE + batchId)); - batchInfo.setSqlCode((int) getParamAsNum(parameters, WebConstants.ACK_SQL_CODE + batchId)); - batchInfo.setSqlMessage(getParam(parameters, WebConstants.ACK_SQL_MESSAGE + batchId)); - } - return batchInfo; - } - - protected static Map getParametersFromQueryUrl(String parameterString) throws IOException { - Map parameters = new HashMap(); - String[] tokens = parameterString.split("&"); - for (String param : tokens) { - String[] nameValuePair = param.split("="); - if (nameValuePair.length == 2) { - parameters.put(nameValuePair[0], URLDecoder.decode(nameValuePair[1], IoConstants.ENCODING)); - } - } - return parameters; - } - - private static long getParamAsNum(Map parameters, String parameterName) { - return NumberUtils.toLong(getParam(parameters, parameterName)); + batchInfo.setIgnored(getParamAsBoolean(parameters, WebConstants.ACK_IGNORE_COUNT + batchId)); + String status = getParam(parameters, WebConstants.ACK_BATCH_NAME + batchId, "").trim(); + batchInfo.setOk(status.equalsIgnoreCase(WebConstants.ACK_BATCH_OK)); + batchInfo.setResend(status.equalsIgnoreCase(WebConstants.ACK_BATCH_RESEND)); + batchInfo.setStartTime(getParamAsNum(parameters, WebConstants.ACK_START_TIME + batchId)); + if (!batchInfo.isOk()) { + batchInfo.setErrorLine(NumberUtils.toLong(status)); + batchInfo.setSqlState(getParam(parameters, WebConstants.ACK_SQL_STATE + batchId)); + batchInfo.setSqlCode((int) getParamAsNum(parameters, WebConstants.ACK_SQL_CODE + batchId)); + 
batchInfo.setSqlMessage(getParam(parameters, WebConstants.ACK_SQL_MESSAGE + batchId)); + } + return batchInfo; + } + + protected static Map getParametersFromQueryUrl(String parameterString) throws IOException { + Map parameters = new HashMap(); + String[] tokens = parameterString.split("&"); + for (String param : tokens) { + String[] nameValuePair = param.split("="); + if (nameValuePair.length == 2) { + parameters.put(nameValuePair[0], URLDecoder.decode(nameValuePair[1], IoConstants.ENCODING)); + } + } + return parameters; + } + + private static long getParamAsNum(Map parameters, String parameterName) { + return NumberUtils.toLong(getParam(parameters, parameterName)); } private static boolean getParamAsBoolean(Map parameters, String parameterName) { return getParamAsNum(parameters, parameterName) > 0; - } - - private static String getParam(Map parameters, String parameterName, String defaultValue) { - String value = getParam(parameters, parameterName); - return value == null ? defaultValue : value; - } - - private static String getParam(Map parameters, String parameterName) { - Object value = parameters.get(parameterName); - if (value instanceof String[]) { - String[] arrayValue = (String[]) value; - if (arrayValue.length > 0) { - value = StringUtils.trim(arrayValue[0]); - } - } - return (String) value; - } - + } + + private static String getParam(Map parameters, String parameterName, String defaultValue) { + String value = getParam(parameters, parameterName); + return value == null ? defaultValue : value; + } + + private static String getParam(Map parameters, String parameterName) { + Object value = parameters.get(parameterName); + if (value instanceof String[]) { + String[] arrayValue = (String[]) value; + if (arrayValue.length > 0) { + value = StringUtils.trim(arrayValue[0]); + } + } + return (String) value; + } + } diff --git a/symmetric-core/src/main/resources/symmetric-schema.xml b/symmetric-core/src/main/resources/symmetric-schema.xml index d68397ef60..598362484f 100644 --- a/symmetric-core/src/main/resources/symmetric-schema.xml +++ b/symmetric-core/src/main/resources/symmetric-schema.xml @@ -234,24 +234,11 @@ - + - - - - - - - - - - - - - @@ -259,6 +246,43 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -583,48 +607,61 @@
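The acknowledgement handling above keeps the wire-level parameter names (for example WebConstants.ACK_DATABASE_MILLIS) but now records the value through BatchAck.setLoadMillis(...) instead of setDatabaseMillis(...). Below is a minimal, self-contained sketch of the per-batch key/value convention that the readAcknowledgement(...)/getBatchInfo(...) helpers parse; the literal prefix strings are illustrative placeholders, not the real WebConstants values.

import java.util.HashMap;
import java.util.Map;

public class AckParameterSketch {

    // Placeholder prefixes standing in for WebConstants.ACK_BATCH_NAME and
    // WebConstants.ACK_DATABASE_MILLIS; the real constants live in the SymmetricDS web layer.
    private static final String BATCH_PREFIX = "batch-";
    private static final String LOAD_MILLIS_PREFIX = "loadMillis-";

    public static void main(String[] args) {
        long batchId = 42L;

        // The source node receives one status entry per batch, keyed by batch id,
        // plus optional statistics entries that share the same id suffix.
        Map<String, String> ackParams = new HashMap<>();
        ackParams.put(BATCH_PREFIX + batchId, "ok");          // "ok", "resend", or the failed row number
        ackParams.put(LOAD_MILLIS_PREFIX + batchId, "1250");  // load time, now stored via setLoadMillis()

        // Mirroring getBatchInfo(): strip the prefix to recover the batch id,
        // then look up the related statistics for that id.
        for (String key : ackParams.keySet()) {
            if (key.startsWith(BATCH_PREFIX)) {
                long id = Long.parseLong(key.substring(BATCH_PREFIX.length()));
                String status = ackParams.get(key);
                long loadMillis = Long.parseLong(ackParams.getOrDefault(LOAD_MILLIS_PREFIX + id, "0"));
                System.out.println("batch " + id + " status=" + status + " loadMillis=" + loadMillis);
            }
        }
    }
}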
- + - - - - - + + + + + + + + + - - - - - - - - + + + + + + + + + + + + + + + - + + - + + + + + + + + - + + - - - - - - - - - + + + - diff --git a/symmetric-core/src/test/java/org/jumpmind/symmetric/model/OutgoingBatchesTest.java b/symmetric-core/src/test/java/org/jumpmind/symmetric/model/OutgoingBatchesTest.java index ba567e1580..a8d8c72df9 100644 --- a/symmetric-core/src/test/java/org/jumpmind/symmetric/model/OutgoingBatchesTest.java +++ b/symmetric-core/src/test/java/org/jumpmind/symmetric/model/OutgoingBatchesTest.java @@ -26,7 +26,7 @@ import java.util.List; import java.util.Set; -import org.jumpmind.symmetric.model.OutgoingBatch.Status; +import org.jumpmind.symmetric.model.AbstractBatch.Status; import org.jumpmind.util.AppUtils; import static org.junit.Assert.*; import org.junit.Test; diff --git a/symmetric-core/src/test/java/org/jumpmind/symmetric/service/impl/AbstractDataLoaderServiceTest.java b/symmetric-core/src/test/java/org/jumpmind/symmetric/service/impl/AbstractDataLoaderServiceTest.java index ff8a203dfb..8ff7ab9e72 100644 --- a/symmetric-core/src/test/java/org/jumpmind/symmetric/service/impl/AbstractDataLoaderServiceTest.java +++ b/symmetric-core/src/test/java/org/jumpmind/symmetric/service/impl/AbstractDataLoaderServiceTest.java @@ -179,7 +179,7 @@ public void test02Statistics() throws Exception { assertEquals(batch.getStatus(), IncomingBatch.Status.ER, "Wrong status. " + printDatabase()); assertEquals(batch.getFailedRowNumber(), 8l, "Wrong failed row number. " + batch.getSqlMessage() + ". " + printDatabase()); assertEquals(batch.getByteCount(), 483l, "Wrong byte count. " + printDatabase()); - assertEquals(batch.getStatementCount(), 8l, "Wrong statement count. " + printDatabase()); + assertEquals(batch.getLoadRowCount(), 8l, "Wrong statement count. " + printDatabase()); assertEquals(batch.getFallbackInsertCount(), 1l, "Wrong fallback insert count. " + printDatabase()); assertEquals(batch.getFallbackUpdateCount(), 2l, "Wrong fallback update count. " @@ -286,7 +286,7 @@ public void test04SqlStatistics() throws Exception { assertEquals(batch.getFailedRowNumber(), 3l, "Wrong failed row number. " + printDatabase()); Assert.assertEquals("Wrong byte count: " + batch.getByteCount() + ". " + printDatabase(), 394l, batch.getByteCount()); - assertEquals(batch.getStatementCount(), 3l, "Wrong statement count. " + printDatabase()); + assertEquals(batch.getLoadRowCount(), 3l, "Wrong statement count. " + printDatabase()); assertEquals(batch.getFallbackInsertCount(), 0l, "Wrong fallback insert count. " + printDatabase()); assertEquals(batch.getFallbackUpdateCount(), 0l, "Wrong fallback update count. 
" @@ -315,7 +315,7 @@ public void test05SkippingResentBatch() throws Exception { assertEquals(batch.getStatus(), IncomingBatch.Status.OK, "Wrong status"); assertEquals(batch.getSkipCount(), i); assertEquals(batch.getFailedRowNumber(), 0l, "Wrong failed row number"); - assertEquals(batch.getStatementCount(), 1l, "Wrong statement count"); + assertEquals(batch.getLoadRowCount(), 1l, "Wrong statement count"); assertEquals(batch.getFallbackInsertCount(), 0l, "Wrong fallback insert count"); assertEquals(batch.getFallbackUpdateCount(), 0l, "Wrong fallback update count"); // pause to make sure we get a different start time on the incoming @@ -338,7 +338,7 @@ public void test06ErrorWhileSkip() throws Exception { assertNotNull(batch); assertEquals(batch.getStatus(), IncomingBatch.Status.OK, "Wrong status"); assertEquals(batch.getFailedRowNumber(), 0l, "Wrong failed row number"); - assertEquals(batch.getStatementCount(), 1l, "Wrong statement count"); + assertEquals(batch.getLoadRowCount(), 1l, "Wrong statement count"); ByteArrayOutputStream out = new ByteArrayOutputStream(); CsvWriter writer = getWriter(out); @@ -394,7 +394,7 @@ public void test07DataIntregrityError() throws Exception { assertNotNull(batch); assertEquals(batch.getStatus(), IncomingBatch.Status.ER, "Wrong status"); assertEquals(batch.getFailedRowNumber(), 2l, "Wrong failed row number"); - assertEquals(batch.getStatementCount(), 2l, "Wrong statement count"); + assertEquals(batch.getLoadRowCount(), 2l, "Wrong statement count"); load(out); assertEquals(findIncomingBatchStatus(batchId, TestConstants.TEST_CLIENT_EXTERNAL_ID), @@ -404,7 +404,7 @@ public void test07DataIntregrityError() throws Exception { assertNotNull(batch); assertEquals(batch.getStatus(), IncomingBatch.Status.ER, "Wrong status"); assertEquals(batch.getFailedRowNumber(), 2l, "Wrong failed row number"); - assertEquals(batch.getStatementCount(), 2l, "Wrong statement count"); + assertEquals(batch.getLoadRowCount(), 2l, "Wrong statement count"); getSymmetricEngine().getDataLoaderService().delete(conflictSettings); setLoggingLevelForTest(old); @@ -462,7 +462,7 @@ public void test09ErrorThenSuccessBatch() throws Exception { + printDatabase()); assertEquals(batch.getFailedRowNumber(), 1l, "Wrong failed row number. " + printDatabase()); - assertEquals(batch.getStatementCount(), 1l, "Wrong statement count. " + printDatabase()); + assertEquals(batch.getLoadRowCount(), 1l, "Wrong statement count. " + printDatabase()); // pause to make sure we get a different start time on the incoming // batch batch Thread.sleep(10); @@ -480,7 +480,7 @@ public void test09ErrorThenSuccessBatch() throws Exception { assertNotNull(batch); assertEquals(batch.getStatus(), IncomingBatch.Status.OK, "Wrong status. " + printDatabase()); assertEquals(batch.getFailedRowNumber(), 0l, "Wrong failed row number. " + printDatabase()); - assertEquals(batch.getStatementCount(), 1l, "Wrong statement count. " + printDatabase()); + assertEquals(batch.getLoadRowCount(), 1l, "Wrong statement count. 
" + printDatabase()); setLoggingLevelForTest(old); } diff --git a/symmetric-core/src/test/java/org/jumpmind/symmetric/service/impl/AbstractRouterServiceTest.java b/symmetric-core/src/test/java/org/jumpmind/symmetric/service/impl/AbstractRouterServiceTest.java index fba608d127..c7da5e7538 100644 --- a/symmetric-core/src/test/java/org/jumpmind/symmetric/service/impl/AbstractRouterServiceTest.java +++ b/symmetric-core/src/test/java/org/jumpmind/symmetric/service/impl/AbstractRouterServiceTest.java @@ -500,7 +500,7 @@ public void testBshTransactionalRoutingOnUpdate() { Assert.assertEquals(getDbDialect().supportsTransactionId() ? 1 : 510, batches.getBatches() .size()); Assert.assertEquals(getDbDialect().supportsTransactionId() ? count : 1, (int) batches - .getBatches().get(0).getDataEventCount()); + .getBatches().get(0).getDataRowCount()); batches = getOutgoingBatchService().getOutgoingBatches(NODE_GROUP_NODE_2.getNodeId(), false); filterForChannels(batches, testChannel); @@ -512,7 +512,7 @@ public void testBshTransactionalRoutingOnUpdate() { Assert.assertEquals(getDbDialect().supportsTransactionId() ? 1 : 510, batches.getBatches() .size()); Assert.assertEquals(getDbDialect().supportsTransactionId() ? count : 1, (int) batches - .getBatches().get(0).getDataEventCount()); + .getBatches().get(0).getDataRowCount()); resetBatches(); } diff --git a/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/AbstractDatabaseWriter.java b/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/AbstractDatabaseWriter.java index 29f137859b..f3156adfa0 100644 --- a/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/AbstractDatabaseWriter.java +++ b/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/AbstractDatabaseWriter.java @@ -411,7 +411,7 @@ protected void bindVariables(Map variables) { protected boolean script(CsvData data) { try { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); String script = data.getParsedData(CsvData.ROW_DATA)[0]; Map variables = new HashMap(); bindVariables(variables); diff --git a/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/DataWriterStatisticConstants.java b/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/DataWriterStatisticConstants.java index 80db3eae50..841ab668c3 100644 --- a/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/DataWriterStatisticConstants.java +++ b/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/DataWriterStatisticConstants.java @@ -26,7 +26,7 @@ abstract public class DataWriterStatisticConstants { public static final String BYTECOUNT = "BYTECOUNT"; public static final String TRANSFORMMILLIS = "TRANSFORMMILLIS"; public static final String FILTERMILLIS = "FILTERMILLIS"; - public static final String DATABASEMILLIS = "DATABASEMILLIS"; + public static final String LOADMILLIS = "LOADMILLIS"; public static final String STATEMENTCOUNT = "STATEMENTCOUNT"; public static final String INSERTCOUNT = "INSERTCOUNT"; public static final String DELETECOUNT = "DELETECOUNT"; diff --git a/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/DefaultDatabaseWriter.java b/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/DefaultDatabaseWriter.java index bcda168066..9f6df3403e 100644 --- a/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/DefaultDatabaseWriter.java +++ 
b/symmetric-io/src/main/java/org/jumpmind/symmetric/io/data/writer/DefaultDatabaseWriter.java @@ -126,7 +126,7 @@ public void close() { protected void commit(boolean earlyCommit) { if (transaction != null) { try { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); this.transaction.commit(); if (!earlyCommit) { notifyFiltersBatchCommitted(); @@ -134,7 +134,7 @@ protected void commit(boolean earlyCommit) { notifyFiltersEarlyCommit(); } } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } @@ -145,11 +145,11 @@ protected void commit(boolean earlyCommit) { protected void rollback() { if (transaction != null) { try { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); this.transaction.rollback(); notifyFiltersBatchRolledback(); } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } @@ -159,7 +159,7 @@ protected void rollback() { @Override protected LoadStatus insert(CsvData data) { try { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); if (requireNewStatement(DmlType.INSERT, data, false, true, null)) { this.lastUseConflictDetection = true; this.currentDmlStatement = platform.createDmlStatement(DmlType.INSERT, targetTable, writerSettings.getTextColumnExpression()); @@ -199,14 +199,14 @@ protected LoadStatus insert(CsvData data) { logFailureDetails(ex, data, true); throw ex; } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } @Override protected LoadStatus delete(CsvData data, boolean useConflictDetection) { try { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); Conflict conflict = writerSettings.pickConflict(this.targetTable, batch); Map lookupDataMap = null; if (requireNewStatement(DmlType.DELETE, data, useConflictDetection, useConflictDetection, @@ -315,7 +315,7 @@ protected LoadStatus delete(CsvData data, boolean useConflictDetection) { logFailureDetails(ex, data, true); throw ex; } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } @@ -323,7 +323,7 @@ protected LoadStatus delete(CsvData data, boolean useConflictDetection) { @Override protected LoadStatus update(CsvData data, boolean applyChangesOnly, boolean useConflictDetection) { try { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); String[] rowData = getRowData(data, CsvData.ROW_DATA); String[] oldData = getRowData(data, CsvData.OLD_DATA); ArrayList changedColumnNameList = new ArrayList(); @@ -487,7 +487,7 @@ protected LoadStatus update(CsvData data, boolean applyChangesOnly, boolean useC logFailureDetails(ex, data, true); throw ex; } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + 
statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } @@ -497,7 +497,7 @@ protected boolean create(CsvData data) { try { transaction.commit(); - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); xml = data.getParsedData(CsvData.ROW_DATA)[0]; log.info("About to create table using the following definition: {}", xml); StringReader reader = new StringReader(xml); @@ -521,14 +521,14 @@ protected boolean create(CsvData data) { log.error("Failed to alter table using the following xml: {}", xml); throw ex; } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } @Override protected boolean sql(CsvData data) { try { - statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).startTimer(DataWriterStatisticConstants.LOADMILLIS); String script = data.getParsedData(CsvData.ROW_DATA)[0]; List sqlStatements = getSqlStatements(script); long count = 0; @@ -553,7 +553,7 @@ protected boolean sql(CsvData data) { count); return true; } finally { - statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS); + statistics.get(batch).stopTimer(DataWriterStatisticConstants.LOADMILLIS); } } diff --git a/symmetric-server/src/main/java/org/jumpmind/symmetric/web/PullUriHandler.java b/symmetric-server/src/main/java/org/jumpmind/symmetric/web/PullUriHandler.java index 310b903973..57874ffe59 100644 --- a/symmetric-server/src/main/java/org/jumpmind/symmetric/web/PullUriHandler.java +++ b/symmetric-server/src/main/java/org/jumpmind/symmetric/web/PullUriHandler.java @@ -1,70 +1,70 @@ -/** - * Licensed to JumpMind Inc under one or more contributor - * license agreements. See the NOTICE file distributed - * with this work for additional information regarding - * copyright ownership. JumpMind Inc licenses this file - * to you under the GNU General Public License, version 3.0 (GPLv3) - * (the "License"); you may not use this file except in compliance - * with the License. - * - * You should have received a copy of the GNU General Public License, - * version 3.0 (GPLv3) along with this library; if not, see - * . - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.jumpmind.symmetric.web; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.List; -import java.util.Map; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.commons.lang.StringUtils; -import org.jumpmind.symmetric.common.ParameterConstants; -import org.jumpmind.symmetric.model.ChannelMap; -import org.jumpmind.symmetric.model.Node; -import org.jumpmind.symmetric.model.NodeSecurity; -import org.jumpmind.symmetric.model.OutgoingBatch; -import org.jumpmind.symmetric.model.ProcessInfo; -import org.jumpmind.symmetric.model.ProcessInfo.Status; -import org.jumpmind.symmetric.model.ProcessInfoKey; -import org.jumpmind.symmetric.model.ProcessInfoKey.ProcessType; -import org.jumpmind.symmetric.service.IConfigurationService; -import org.jumpmind.symmetric.service.IDataExtractorService; -import org.jumpmind.symmetric.service.INodeService; -import org.jumpmind.symmetric.service.IOutgoingBatchService; -import org.jumpmind.symmetric.service.IParameterService; -import org.jumpmind.symmetric.service.IRegistrationService; -import org.jumpmind.symmetric.statistic.IStatisticManager; -import org.jumpmind.symmetric.transport.IOutgoingTransport; -import org.jumpmind.symmetric.transport.TransportUtils; - +/** + * Licensed to JumpMind Inc under one or more contributor + * license agreements. See the NOTICE file distributed + * with this work for additional information regarding + * copyright ownership. JumpMind Inc licenses this file + * to you under the GNU General Public License, version 3.0 (GPLv3) + * (the "License"); you may not use this file except in compliance + * with the License. + * + * You should have received a copy of the GNU General Public License, + * version 3.0 (GPLv3) along with this library; if not, see + * . + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.jumpmind.symmetric.web; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.List; +import java.util.Map; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.lang.StringUtils; +import org.jumpmind.symmetric.common.ParameterConstants; +import org.jumpmind.symmetric.model.ChannelMap; +import org.jumpmind.symmetric.model.Node; +import org.jumpmind.symmetric.model.NodeSecurity; +import org.jumpmind.symmetric.model.OutgoingBatch; +import org.jumpmind.symmetric.model.ProcessInfo; +import org.jumpmind.symmetric.model.ProcessInfo.Status; +import org.jumpmind.symmetric.model.ProcessInfoKey; +import org.jumpmind.symmetric.model.ProcessInfoKey.ProcessType; +import org.jumpmind.symmetric.service.IConfigurationService; +import org.jumpmind.symmetric.service.IDataExtractorService; +import org.jumpmind.symmetric.service.INodeService; +import org.jumpmind.symmetric.service.IOutgoingBatchService; +import org.jumpmind.symmetric.service.IParameterService; +import org.jumpmind.symmetric.service.IRegistrationService; +import org.jumpmind.symmetric.statistic.IStatisticManager; +import org.jumpmind.symmetric.transport.IOutgoingTransport; +import org.jumpmind.symmetric.transport.TransportUtils; + /** * Handles data pulls from other nodes. */ -public class PullUriHandler extends AbstractCompressionUriHandler { - - private INodeService nodeService; - - private IConfigurationService configurationService; - - private IDataExtractorService dataExtractorService; - +public class PullUriHandler extends AbstractCompressionUriHandler { + + private INodeService nodeService; + + private IConfigurationService configurationService; + + private IDataExtractorService dataExtractorService; + private IRegistrationService registrationService; - private IStatisticManager statisticManager; - - private IOutgoingBatchService outgoingBatchService; + private IStatisticManager statisticManager; + + private IOutgoingBatchService outgoingBatchService; public PullUriHandler(IParameterService parameterService, INodeService nodeService, @@ -75,7 +75,7 @@ public PullUriHandler(IParameterService parameterService, this.configurationService = configurationService; this.dataExtractorService = dataExtractorService; this.registrationService = registrationService; - this.statisticManager = statisticManager; + this.statisticManager = statisticManager; this.outgoingBatchService = outgoingBatchService; } @@ -94,15 +94,15 @@ public void handleWithCompression(HttpServletRequest req, HttpServletResponse re ChannelMap map = new ChannelMap(); map.addSuspendChannels(req.getHeader(WebConstants.SUSPENDED_CHANNELS)); map.addIgnoreChannels(req.getHeader(WebConstants.IGNORED_CHANNELS)); - map.setThreadChannel(req.getHeader(WebConstants.THREAD_CHANNEL)); - + map.setThreadChannel(req.getHeader(WebConstants.THREAD_CHANNEL)); + // pull out headers and pass to pull() method pull(nodeId, req.getRemoteHost(), req.getRemoteAddr(), res.getOutputStream(), req.getHeader(WebConstants.HEADER_ACCEPT_CHARSET), res, map); log.debug("Done with Pull request from {}", nodeId); } - + public void pull(String nodeId, String remoteHost, String remoteAddress, OutputStream outputStream, String encoding, HttpServletResponse res, ChannelMap map) throws IOException { NodeSecurity nodeSecurity = nodeService.findNodeSecurity(nodeId, true); @@ -123,22 +123,22 @@ public void pull(String nodeId, String remoteHost, String remoteAddress, 
IOutgoingTransport outgoingTransport = createOutgoingTransport(outputStream, encoding, map); ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey( - nodeService.findIdentityNodeId(), map.getThreadChannel(), nodeId, ProcessType.PULL_HANDLER)); + nodeService.findIdentityNodeId(), map.getThreadChannel(), nodeId, ProcessType.PULL_HANDLER)); - try { + try { Node targetNode = nodeService.findNode(nodeId, true); List batchList = dataExtractorService.extract(processInfo, targetNode, - map.getThreadChannel(), outgoingTransport); - logDataReceivedFromPush(targetNode, batchList); - - if (processInfo.getStatus() != Status.ERROR) { + map.getThreadChannel(), outgoingTransport); + logDataReceivedFromPush(targetNode, batchList); + + if (processInfo.getStatus() != Status.ERROR) { addPendingBatchCounts(targetNode.getNodeId(), res); - processInfo.setStatus(Status.OK); + processInfo.setStatus(Status.OK); + } + } finally { + if (processInfo.getStatus() != Status.OK) { + processInfo.setStatus(Status.ERROR); } - } finally { - if (processInfo.getStatus() != Status.OK) { - processInfo.setStatus(Status.ERROR); - } } outgoingTransport.close(); } @@ -149,33 +149,33 @@ public void pull(String nodeId, String remoteHost, String remoteAddress, statisticManager.incrementNodesPulled(1); statisticManager.incrementTotalNodesPulledTime(System.currentTimeMillis() - ts); } - } - - private void addPendingBatchCounts(String targetNodeId, HttpServletResponse res) { - if (this.parameterService.is(ParameterConstants.HYBRID_PUSH_PULL_ENABLED)) { - Map batchesToSendByChannel = - this.outgoingBatchService.countOutgoingBatchesPendingByChannel(targetNodeId); - if (batchesToSendByChannel != null && !batchesToSendByChannel.isEmpty()) { - res.addHeader(WebConstants.BATCH_TO_SEND_COUNT, TransportUtils.toCSV(batchesToSendByChannel)); - } - } - } - - private void logDataReceivedFromPush(Node targetNode, List batchList) { - int batchesCount = 0; - int dataCount = 0; - for (OutgoingBatch outgoingBatch : batchList) { - if (outgoingBatch.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.OK) { - batchesCount++; - dataCount += outgoingBatch.getDataEventCount(); - } - } - - if (batchesCount > 0) { - log.info( - "{} data and {} batches sent during pull request from {}", - new Object[] { dataCount, batchesCount, targetNode.toString() }); - } - } - + } + + private void addPendingBatchCounts(String targetNodeId, HttpServletResponse res) { + if (this.parameterService.is(ParameterConstants.HYBRID_PUSH_PULL_ENABLED)) { + Map batchesToSendByChannel = + this.outgoingBatchService.countOutgoingBatchesPendingByChannel(targetNodeId); + if (batchesToSendByChannel != null && !batchesToSendByChannel.isEmpty()) { + res.addHeader(WebConstants.BATCH_TO_SEND_COUNT, TransportUtils.toCSV(batchesToSendByChannel)); + } + } + } + + private void logDataReceivedFromPush(Node targetNode, List batchList) { + int batchesCount = 0; + int dataCount = 0; + for (OutgoingBatch outgoingBatch : batchList) { + if (outgoingBatch.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.OK) { + batchesCount++; + dataCount += outgoingBatch.getDataRowCount(); + } + } + + if (batchesCount > 0) { + log.info( + "{} data and {} batches sent during pull request from {}", + new Object[] { dataCount, batchesCount, targetNode.toString() }); + } + } + } diff --git a/symmetric-server/src/main/java/org/jumpmind/symmetric/web/rest/RestService.java b/symmetric-server/src/main/java/org/jumpmind/symmetric/web/rest/RestService.java index c643b59fd8..9828566277 
100644 --- a/symmetric-server/src/main/java/org/jumpmind/symmetric/web/rest/RestService.java +++ b/symmetric-server/src/main/java/org/jumpmind/symmetric/web/rest/RestService.java @@ -1,1779 +1,1779 @@ -/** - * Licensed to JumpMind Inc under one or more contributor - * license agreements. See the NOTICE file distributed - * with this work for additional information regarding - * copyright ownership. JumpMind Inc licenses this file - * to you under the GNU General Public License, version 3.0 (GPLv3) - * (the "License"); you may not use this file except in compliance - * with the License. - * - * You should have received a copy of the GNU General Public License, - * version 3.0 (GPLv3) along with this library; if not, see - * . - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.jumpmind.symmetric.web.rest; - -import static org.apache.commons.lang.StringUtils.isNotBlank; - -import java.io.BufferedInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.lang.annotation.Annotation; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - -import javax.servlet.ServletContext; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.MDC; -import org.jumpmind.db.model.Table; -import org.jumpmind.db.sql.ISqlTemplate; -import org.jumpmind.db.sql.Row; -import org.jumpmind.db.util.BasicDataSourcePropertyConstants; -import org.jumpmind.exception.IoException; -import org.jumpmind.symmetric.ISymmetricEngine; -import org.jumpmind.symmetric.common.Constants; -import org.jumpmind.symmetric.common.ParameterConstants; -import org.jumpmind.symmetric.io.data.writer.StructureDataWriter.PayloadType; -import org.jumpmind.symmetric.job.IJob; -import org.jumpmind.symmetric.job.IJobManager; -import org.jumpmind.symmetric.model.BatchAck; -import org.jumpmind.symmetric.model.BatchAckResult; -import org.jumpmind.symmetric.model.IncomingBatch; -import org.jumpmind.symmetric.model.IncomingBatch.Status; -import org.jumpmind.symmetric.model.NetworkedNode; -import org.jumpmind.symmetric.model.NodeChannel; -import org.jumpmind.symmetric.model.NodeGroupLink; -import org.jumpmind.symmetric.model.NodeHost; -import org.jumpmind.symmetric.model.NodeSecurity; -import org.jumpmind.symmetric.model.OutgoingBatch; -import org.jumpmind.symmetric.model.OutgoingBatchSummary; -import org.jumpmind.symmetric.model.OutgoingBatchWithPayload; -import org.jumpmind.symmetric.model.ProcessInfo; -import org.jumpmind.symmetric.model.ProcessInfoKey; -import org.jumpmind.symmetric.model.ProcessInfoKey.ProcessType; -import org.jumpmind.symmetric.model.Trigger; -import org.jumpmind.symmetric.model.TriggerRouter; -import org.jumpmind.symmetric.service.IAcknowledgeService; -import org.jumpmind.symmetric.service.IConfigurationService; -import org.jumpmind.symmetric.service.IDataExtractorService; -import org.jumpmind.symmetric.service.IDataLoaderService; -import 
org.jumpmind.symmetric.service.IDataService; -import org.jumpmind.symmetric.service.INodeService; -import org.jumpmind.symmetric.service.IOutgoingBatchService; -import org.jumpmind.symmetric.service.IRegistrationService; -import org.jumpmind.symmetric.service.ITriggerRouterService; -import org.jumpmind.symmetric.statistic.IStatisticManager; -import org.jumpmind.symmetric.web.ServerSymmetricEngine; -import org.jumpmind.symmetric.web.SymmetricEngineHolder; -import org.jumpmind.symmetric.web.WebConstants; -import org.jumpmind.symmetric.web.rest.model.Batch; -import org.jumpmind.symmetric.web.rest.model.BatchAckResults; -import org.jumpmind.symmetric.web.rest.model.BatchResult; -import org.jumpmind.symmetric.web.rest.model.BatchResults; -import org.jumpmind.symmetric.web.rest.model.BatchSummaries; -import org.jumpmind.symmetric.web.rest.model.BatchSummary; -import org.jumpmind.symmetric.web.rest.model.ChannelStatus; -import org.jumpmind.symmetric.web.rest.model.Engine; -import org.jumpmind.symmetric.web.rest.model.EngineList; -import org.jumpmind.symmetric.web.rest.model.Heartbeat; -import org.jumpmind.symmetric.web.rest.model.Node; -import org.jumpmind.symmetric.web.rest.model.NodeList; -import org.jumpmind.symmetric.web.rest.model.NodeStatus; -import org.jumpmind.symmetric.web.rest.model.PullDataResults; -import org.jumpmind.symmetric.web.rest.model.QueryResults; -import org.jumpmind.symmetric.web.rest.model.RegistrationInfo; -import org.jumpmind.symmetric.web.rest.model.RestError; -import org.jumpmind.symmetric.web.rest.model.SendSchemaRequest; -import org.jumpmind.symmetric.web.rest.model.SendSchemaResponse; -import org.jumpmind.symmetric.web.rest.model.TableName; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseBody; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.multipart.MultipartFile; - -import com.wordnik.swagger.annotations.ApiOperation; -import com.wordnik.swagger.annotations.ApiParam; - -/** - * This is a REST API for SymmetricDS. The API will be active only if - * rest.api.enable=true. The property is turned off by default. The REST API is - * available at http://hostname:port/api for the stand alone SymmetricDS - * installation. - * - *

- * General HTTP Responses to the methods:
- *
- * ALL Methods may return the following HTTP responses.
- * In general:
- *   • HTTP 2xx = Success
- *   • HTTP 4xx = Problem on the caller (client) side
- *   • HTTP 5xx - Problem on the REST service side
- *
- * ALL Methods
- *   • HTTP 401 - Unauthorized. You have not successfully authenticated.
- *     Authentication details are in the response body.
- *   • HTTP 404 - Not Found. You attempted to perform an operation on a resource
- *     that doesn't exist. I.E. you tried to start or stop an engine that doesn't
- *     exist.
- *   • HTTP 405 - Method Not Allowed. I.E. you attempted a service call that
- *     uses the default engine (/engine/identity vs engine/{engine}/identity) and
- *     there was more than one engine found on the server.
- *   • HTTP 500 - Internal Server Error. Something went wrong on the server /
- *     service, and we couldn't fulfill the request. Details are in the response
- *     body.
- *
- * GET Methods
- *   • HTTP 200 - Success with result contained in the response body.
- *   • HTTP 204 - Success with no results. Your GET request completed
- *     successfully, but found no matching entities.
- *
- */ -@Controller -public class RestService { - - protected final Logger log = LoggerFactory.getLogger(getClass()); - - @Autowired - ServletContext context; - - /** - * Provides a list of {@link Engine} that are configured on the node. - * - * @return {@link EngineList} - Engines configured on the node
- *
- *
-     * Example xml reponse is as follows:
- * {@code - * - * RootSugarDB-root - * - * - * }
- *
- * Example json response is as follows:
- * {"engines":[{"name":"RootSugarDB-root"}]}
- *
- */ - @ApiOperation(value = "Obtain a list of configured Engines") - @RequestMapping(value = "/enginelist", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final EngineList getEngineList() { - EngineList list = new EngineList(); - Collection engines = getSymmetricEngineHolder().getEngines() - .values(); - for (ISymmetricEngine engine : engines) { - if (engine.getParameterService().is(ParameterConstants.REST_API_ENABLED)) { - list.addEngine(new Engine(engine.getEngineName())); - } - } - return list; - } - - /** - * Provides Node information for the single engine - * - * return {@link Node}
- *
- *
-     * Example xml reponse is as follows:
- * {@code - * 0 - * 0 - * server01 - * true - * 2012-12-20T09:26:02-05:00 - * server01 - * true - * true - * false - * http://machine-name:31415/sync/RootSugarDB-root - * - * }
- *
- * Example json response is as follows:
- * {"name":"server01","externalId":"server01","registrationServer":true,"syncUrl":"http://machine-name:31415/sync/RootSugarDB-root","batchToSendCount":0,"batchInErrorCount":0,"lastHeartbeat":1356013562000,"registered":true,"initialLoaded":true,"reverseInitialLoaded":false}
- *
- */ - @ApiOperation(value = "Obtain node information for the single engine") - @RequestMapping(value = "engine/node", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final Node getNode() { - return nodeImpl(getSymmetricEngine()); - } - - /** - * Provides Node information for the specified engine - */ - @ApiOperation(value = "Obtain node information for he specified engine") - @RequestMapping(value = "engine/{engine}/node", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final Node getNode(@PathVariable("engine") String engineName) { - return nodeImpl(getSymmetricEngine(engineName)); - } - - /** - * Provides a list of children that are registered with this engine. - * - * return {@link Node}
- *
- *
-     * Example xml reponse is as follows:
- * {@code - * - * 0 - * 0 - * client01 - * true - * client01 - * true - * false - * false - * http://machine-name:31418/sync/ClientSugarDB-client01 - * - * - * }
- *
- * Example json response is as follows:
- * {"nodes":[{"name":"client01","externalId":"client01","registrationServer":false,"syncUrl":"http://gwilmer-laptop:31418/sync/ClientSugarDB-client01","batchToSendCount":0,"batchInErrorCount":0,"lastHeartbeat":null,"registered":true,"initialLoaded":true,"reverseInitialLoaded":false}]}
- *
- */ - @ApiOperation(value = "Obtain list of children for the single engine") - @RequestMapping(value = "engine/children", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final NodeList getChildren() { - return childrenImpl(getSymmetricEngine()); - } - - /** - * Provides a list of children {@link Node} that are registered with this - * engine. - */ - @ApiOperation(value = "Obtain list of children for the specified engine") - @RequestMapping(value = "engine/{engine}/children", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final NodeList getChildrenByEngine(@PathVariable("engine") String engineName) { - return childrenImpl(getSymmetricEngine(engineName)); - } - - /** - * Takes a snapshot for this engine and streams it to the client. The result - * of this call is a stream that should be written to a zip file. The zip - * contains configuration and operational information about the installation - * and can be used to diagnose state of the node - */ - @ApiOperation(value = "Take a diagnostic snapshot for the single engine") - @RequestMapping(value = "engine/snapshot", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final void getSnapshot(HttpServletResponse resp) { - getSnapshot(getSymmetricEngine().getEngineName(), resp); - } - - /** - * Executes a select statement on the node and returns results.
- * Example json response is as follows:
- *
- * {"nbrResults":1,"results":[{"rowNum":1,"columnData":[{"ordinal":1,"name": - * "node_id","value":"root"}]}]} - * - */ - @ApiOperation(value = "Execute the specified SQL statement on the single engine") - @RequestMapping(value = "engine/querynode", method = {RequestMethod.GET, RequestMethod.POST}) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final QueryResults getQueryNode(@RequestParam(value = "query") String sql, @RequestParam(value = "isquery", defaultValue = "true") boolean isQuery) { - return queryNodeImpl(getSymmetricEngine(), sql, isQuery); - } - - /** - * Executes a select statement on the node and returns results. - */ - @ApiOperation(value = "Execute the specified SQL statement for the specified engine") - @RequestMapping(value = "engine/{engine}/querynode", method = {RequestMethod.GET, RequestMethod.POST}) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final QueryResults getQueryNode(@PathVariable("engine") String engineName, - @RequestParam(value = "query") String sql, @RequestParam(value = "isquery", defaultValue = "true") boolean isQuery) { - return queryNodeImpl(getSymmetricEngine(engineName), sql, isQuery); - } - - /** - * Execute the named job. This can be used to control when jobs are run via and external application. You would typically - * disable the job first so it no longer runs automatically. - */ - @ApiOperation(value = "Execute the named job. This can be used to control when jobs are run via and external application. " - + "You would typically disable the job first so it no longer runs automatically. Jobs you might want to control include: " - + "job.route, job.push, job.pull, job.offline.push, job.offline.pull") - @RequestMapping(value = "engine/{engine}/invokejob", method = {RequestMethod.GET, RequestMethod.POST}) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public boolean invokeJob(@PathVariable("engine") String engineName, @RequestParam("jobname") String jobName) { - IJobManager jobManager = getSymmetricEngine(engineName).getJobManager(); - IJob job = jobManager.getJob(jobName); - if (job == null) { - log.warn("Could not find a job with the name '{}' in the '{}' engine", jobName, engineName); - return false; - } else if (!job.isRunning()) { - log.info("Invoking '{}' via the REST API", jobName); - return job.invoke(true); - } else { - log.info("Could not invoke the '{}' job via the REST API because it is already running", jobName); - return false; - } - } - - /** - * Takes a snapshot for the specified engine and streams it to the client. - */ - @ApiOperation(value = "Take a diagnostic snapshot for the specified engine") - @RequestMapping(value = "engine/{engine}/snapshot", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final void getSnapshot(@PathVariable("engine") String engineName, - HttpServletResponse resp) { - BufferedInputStream bis = null; - try { - ISymmetricEngine engine = getSymmetricEngine(engineName); - File file = engine.snapshot(); - resp.setHeader("Content-Disposition", - String.format("attachment; filename=%s", file.getName())); - bis = new BufferedInputStream(new FileInputStream(file)); - IOUtils.copy(bis, resp.getOutputStream()); - } catch (IOException e) { - throw new IoException(e); - } finally { - IOUtils.closeQuietly(bis); - } - } - - /** - * Loads a configuration profile for the single engine on the node. - * - * @param file - * A file stream that contains the profile itself. 
- */ - @ApiOperation(value = "Load a configuration file to the single engine") - @RequestMapping(value = "engine/profile", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postProfile(@RequestParam MultipartFile file) { - loadProfileImpl(getSymmetricEngine(), file); - } - - /** - * Loads a configuration profile for the specified engine on the node. - * - * @param file - * A file stream that contains the profile itself. - */ - @ApiOperation(value = "Load a configuration file to the specified engine") - @RequestMapping(value = "engine/{engine}/profile", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postProfileByEngine(@PathVariable("engine") String engineName, - @RequestParam(value = "file") MultipartFile file) { - - loadProfileImpl(getSymmetricEngine(engineName), file); - } - - /** - * Starts the single engine on the node - */ - @ApiOperation(value = "Start the single engine") - @RequestMapping(value = "engine/start", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postStart() { - startImpl(getSymmetricEngine()); - } - - /** - * Starts the specified engine on the node - */ - @ApiOperation(value = "Start the specified engine") - @RequestMapping(value = "engine/{engine}/start", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postStartByEngine(@PathVariable("engine") String engineName) { - startImpl(getSymmetricEngine(engineName)); - } - - /** - * Stops the single engine on the node - */ - @ApiOperation(value = "Stop the single engine") - @RequestMapping(value = "engine/stop", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postStop() { - stopImpl(getSymmetricEngine()); - } - - /** - * Stops the specified engine on the node - */ - @ApiOperation(value = "Stop the specified engine") - @RequestMapping(value = "engine/{engine}/stop", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postStopByEngine(@PathVariable("engine") String engineName) { - stopImpl(getSymmetricEngine(engineName)); - } - - /** - * Creates instances of triggers for each entry configured table/trigger for - * the single engine on the node - */ - @ApiOperation(value = "Sync triggers on the single engine") - @RequestMapping(value = "engine/synctriggers", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postSyncTriggers( - @RequestParam(required = false, value = "force") boolean force) { - syncTriggersImpl(getSymmetricEngine(), force); - } - - /** - * Creates instances of triggers for each entry configured table/trigger for - * the specified engine on the node - */ - @ApiOperation(value = "Sync triggers on the specified engine") - @RequestMapping(value = "engine/{engine}/synctriggers", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postSyncTriggersByEngine(@PathVariable("engine") String engineName, - @RequestParam(required = false, value = "force") boolean force) { - syncTriggersImpl(getSymmetricEngine(engineName), force); - } - - @ApiOperation(value = "Sync triggers on the single engine for a table") - @RequestMapping(value = "engine/synctriggers/{table}", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void 
postSyncTriggersByTable(@PathVariable("table") String tableName, - @RequestParam(required = false, value = "catalog") String catalogName, - @RequestParam(required = false, value = "schema") String schemaName, - @RequestParam(required = false, value = "force") boolean force) { - syncTriggersByTableImpl(getSymmetricEngine(), catalogName, schemaName, tableName, force); - } - - @ApiOperation(value = "Sync triggers on the specific engine for a table") - @RequestMapping(value = "engine/{engine}/synctriggers/{table}", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postSyncTriggersByTable(@PathVariable("engine") String engineName, - @PathVariable("table") String tableName, - @RequestParam(required = false, value = "catalog") String catalogName, - @RequestParam(required = false, value = "schema") String schemaName, - @RequestParam(required = false, value = "force") boolean force) { - syncTriggersByTableImpl(getSymmetricEngine(engineName), catalogName, schemaName, tableName, - force); - } - - /** - * Send schema updates for all tables or a list of tables to a list of nodes - * or to all nodes in a group. - *

- * Example json request to send all tables to all nodes in group:
- * { "nodeGroupIdToSendTo": "target_group_name" } - *

- * Example json request to send all tables to a list of nodes:
- * { "nodeIdsToSendTo": [ "1", "2" ] } - *

- * Example json request to send a table to a list of nodes:
- * { "nodeIdsToSendTo": ["1", "2"], "tablesToSend": [ { "catalogName": "", "schemaName": "", "tableName": "A" } ] } - *
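For illustration, a minimal sketch (not part of this patch) of posting one of the request bodies shown above to the single-engine sendschema endpoint; the host and port are assumptions, and the printed result should match the documented response shape that follows.

// Hypothetical client sketch: sends the schema for all tables to nodes 1 and 2.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class SendSchemaClient {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:31415/api/engine/sendschema"); // assumed base URL
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/json");
        String body = "{ \"nodeIdsToSendTo\": [ \"1\", \"2\" ] }"; // request body as documented above
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line); // e.g. { "nodeIdsSentTo": { "1": [...], "2": [...] } }
            }
        } finally {
            conn.disconnect();
        }
    }
}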

- * Example json response: - * { "nodeIdsSentTo": { "1": [ { "catalogName": null, "schemaName": null, "tableName": "A" } ] } } - * - * @param engineName - * @param request - * @return {@link SendSchemaResponse} - */ - @ApiOperation(value = "Send schema updates for all tables or a list of tables to a list of nodes or to all nodes in a group.") - @RequestMapping(value = "engine/{engine}/sendschema", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final SendSchemaResponse postSendSchema(@PathVariable("engine") String engineName, - @RequestBody SendSchemaRequest request) { - return sendSchemaImpl(getSymmetricEngine(engineName), request); - } - - /** - * Send schema updates for all tables or a list of tables to a list of nodes - * or to all nodes in a group. See - * {@link RestService#postSendSchema(String, SendSchemaRequest)} for - * additional details. - * - * @param request - * @return {@link SendSchemaResponse} - */ - @ApiOperation(value = "Send schema updates for all tables or a list of tables to a list of nodes or to all nodes in a group.") - @RequestMapping(value = "engine/sendschema", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final SendSchemaResponse postSendSchema(@RequestBody SendSchemaRequest request) { - return sendSchemaImpl(getSymmetricEngine(), request); - } - - /** - * Removes instances of triggers for each entry configured table/trigger for - * the single engine on the node - */ - @ApiOperation(value = "Drop triggers on the single engine") - @RequestMapping(value = "engine/droptriggers", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postDropTriggers() { - dropTriggersImpl(getSymmetricEngine()); - } - - /** - * Removes instances of triggers for each entry configured table/trigger for - * the specified engine on the node - */ - @ApiOperation(value = "Drop triggers on the specified engine") - @RequestMapping(value = "engine/{engine}/droptriggers", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postDropTriggersByEngine(@PathVariable("engine") String engineName) { - dropTriggersImpl(getSymmetricEngine(engineName)); - } - - /** - * Removes instances of triggers for the specified table for the single - * engine on the node - */ - @ApiOperation(value = "Drop triggers for the specified table on the single engine") - @RequestMapping(value = "engine/table/{table}/droptriggers", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postDropTriggersByTable(@PathVariable("table") String tableName) { - dropTriggersImpl(getSymmetricEngine(), tableName); - } - - /** - * Removes instances of triggers for the specified table for the single - * engine on the node - * - */ - @ApiOperation(value = "Drop triggers for the specified table on the specified engine") - @RequestMapping(value = "engine/{engine}/table/{table}/droptriggers", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postDropTriggersByEngineByTable(@PathVariable("engine") String engineName, - @PathVariable("table") String tableName) { - dropTriggersImpl(getSymmetricEngine(engineName), tableName); - } - - /** - * Installs and starts a new node - * - * @param file - * A file stream that contains the node's properties. 
- */ - @ApiOperation(value = "Load a configuration file to the single engine") - @RequestMapping(value = "engine/install", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postInstall(@RequestParam MultipartFile file) { - try { - Properties properties = new Properties(); - properties.load(file.getInputStream()); - getSymmetricEngineHolder().install(properties); - } catch (RuntimeException ex) { - throw ex; - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - /** - * Uninstalls all SymmetricDS objects from the given node (database) for the - * single engine on the node - */ - @ApiOperation(value = "Uninstall SymmetricDS on the single engine") - @RequestMapping(value = "engine/uninstall", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postUninstall() { - uninstallImpl(getSymmetricEngine()); - } - - /** - * Uninstalls all SymmetricDS objects from the given node (database) for the - * specified engine on the node - * - */ - @ApiOperation(value = "Uninstall SymmetricDS on the specified engine") - @RequestMapping(value = "engine/{engine}/uninstall", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postUninstallByEngine(@PathVariable("engine") String engineName) { - uninstallImpl(getSymmetricEngine(engineName)); - } - - /** - * Reinitializes the given node (database) for the single engine on the node - */ - @ApiOperation(value = "Reinitiailize SymmetricDS on the single engine") - @RequestMapping(value = "engine/reinitialize", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postReinitialize() { - reinitializeImpl(getSymmetricEngine()); - } - - /** - * Reinitializes the given node (database) for the specified engine on the - * node - * - */ - @ApiOperation(value = "Reinitiailize SymmetricDS on the specified engine") - @RequestMapping(value = "engine/{engine}/reinitialize", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postReinitializeByEngine(@PathVariable("engine") String engineName) { - reinitializeImpl(getSymmetricEngine(engineName)); - } - - /** - * Refreshes cache for the single engine on the node - */ - @ApiOperation(value = "Refresh caches on the single engine") - @RequestMapping(value = "engine/refreshcache", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postClearCaches() { - clearCacheImpl(getSymmetricEngine()); - } - - /** - * Refreshes cache for the specified engine on the node node - * - */ - @ApiOperation(value = "Refresh caches on the specified engine") - @RequestMapping(value = "engine/{engine}/refreshcache", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postClearCachesByEngine(@PathVariable("engine") String engineName) { - clearCacheImpl(getSymmetricEngine(engineName)); - } - - /** - * Returns an overall status for the single engine of the node. - * - * @return {@link NodeStatus} - * - *

-     * Example xml response is as follows:&#13;

- * {@code
- * <nodestatus>
- * <batchInErrorCount>0</batchInErrorCount>
- * <batchToSendCount>0</batchToSendCount>
- * <databaseType>Microsoft SQL Server</databaseType>
- * <databaseVersion>9.0</databaseVersion>
- * <deploymentType>professional</deploymentType>
- * <externalId>root</externalId>
- * <initialLoaded>true</initialLoaded>
- * <lastHeartbeat>2012-11-17 14:52:19.267</lastHeartbeat>
- * <nodeGroupId>RootSugarDB</nodeGroupId>
- * <nodeId>root</nodeId>
- * <registered>true</registered>
- * <registrationServer>false</registrationServer>
- * <started>true</started>
- * <symmetricVersion>3.1.10</symmetricVersion>
- * <syncEnabled>true</syncEnabled>
- * <syncUrl>http://my-machine-name:31415/sync/RootSugarDB-root</syncUrl>
- * </nodestatus>
- * }
- *&#13;
- * Example json response is as follows:

- * {"started":true,"registered":true,"registrationServer":false,"initialLoaded":true, - * "nodeId":"root","nodeGroupId":"RootSugarDB","externalId":"root", - * "syncUrl":"http://my-machine-name:31415/sync/RootSugarDB-root","databaseType":"Microsoft SQL Server", - * "databaseVersion":"9.0","syncEnabled":true,"createdAtNodeId":null,"batchToSendCount":0, - * "batchInErrorCount":0,"deploymentType":"professional","symmetricVersion":"3.1.10", - * "lastHeartbeat":"2012-11-17 15:15:00.033","hearbeatInterval":null} - *
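As a rough illustration of how the status document above can be used, the sketch below (not part of this patch) polls the single-engine status endpoint and flags the node when batchInErrorCount is non-zero. Host and port are assumptions, and a real client would parse the JSON with a proper library rather than a substring check.

// Hypothetical monitoring sketch against the status fields documented above.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class NodeStatusCheck {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:31415/api/engine/status"); // assumed base URL
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        StringBuilder body = new StringBuilder();
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                body.append(line);
            }
        } finally {
            conn.disconnect();
        }
        // Crude check: the documented response contains "batchInErrorCount":<n>.
        boolean batchesInError = !body.toString().contains("\"batchInErrorCount\":0");
        System.out.println(batchesInError ? "Node has batches in error" : "Node is healthy");
    }
}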
- */ - @ApiOperation(value = "Obtain the status of the single engine") - @RequestMapping(value = "/engine/status", method = RequestMethod.GET) - @ResponseBody - public final NodeStatus getStatus() { - return nodeStatusImpl(getSymmetricEngine()); - } - - /** - * Returns an overall status for the specified engine of the node. - * - * @return {@link NodeStatus} - */ - @ApiOperation(value = "Obtain the status of the specified engine") - @RequestMapping(value = "/engine/{engine}/status", method = RequestMethod.GET) - @ResponseBody - public final NodeStatus getStatusByEngine(@PathVariable("engine") String engineName) { - return nodeStatusImpl(getSymmetricEngine(engineName)); - } - - /** - * Returns status of each channel for the single engine of the node. - * - * @return Set<{@link ChannelStatus}> - */ - @ApiOperation(value = "Obtain the channel status of the single engine") - @RequestMapping(value = "/engine/channelstatus", method = RequestMethod.GET) - @ResponseBody - public final Set getChannelStatus() { - return channelStatusImpl(getSymmetricEngine()); - } - - /** - * Returns status of each channel for the specified engine of the node. - * - * @return Set<{@link ChannelStatus}> - */ - @ApiOperation(value = "Obtain the channel status of the specified engine") - @RequestMapping(value = "/engine/{engine}/channelstatus", method = RequestMethod.GET) - @ResponseBody - public final Set getChannelStatusByEngine( - @PathVariable("engine") String engineName) { - return channelStatusImpl(getSymmetricEngine(engineName)); - } - - /** - * Removes (unregisters and cleans up) a node for the single engine - */ - @ApiOperation(value = "Remove specified node (unregister and clean up) for the single engine") - @RequestMapping(value = "/engine/removenode", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postRemoveNode(@RequestParam(value = "nodeId") String nodeId) { - postRemoveNodeByEngine(nodeId, getSymmetricEngine().getEngineName()); - } - - /** - * Removes (unregisters and cleans up) a node for the single engine - */ - @ApiOperation(value = "Remove specified node (unregister and clean up) for the specified engine") - @RequestMapping(value = "/engine/{engine}/removenode", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postRemoveNodeByEngine(@RequestParam(value = "nodeId") String nodeId, - @PathVariable("engine") String engineName) { - getSymmetricEngine(engineName).removeAndCleanupNode(nodeId); - } - - /** - * Requests the server to add this node to the synchronization scenario as a - * "pull only" node - * - * @param externalId - * The external id for this node - * @param nodeGroup - * The node group to which this node belongs - * @param databaseType - * The database type for this node - * @param databaseVersion - * The database version for this node - * @param hostName - * The host name of the machine on which the client is running - * @return {@link RegistrationInfo} - * - *
-     * Example json response is as follows:

- * {"registered":false,"nodeId":null,"syncUrl":null,"nodePassword":null}
- * In the above example, the node attempted to register, but was not able to successfully register - * because registration was not open on the server. Checking the "registered" element will allow you - * to determine whether the node was successfully registered.

- * The following example shows the results from the registration after registration has been opened - * on the server for the given node.

- * {"registered":true,"nodeId":"001","syncUrl":"http://myserverhost:31415/sync/server-000","nodePassword":"1880fbffd2bc2d00e1d58bd0c734ff"}
- * The nodeId, syncUrl and nodePassword should be stored for subsequent calls to the REST API. - *
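A minimal registration sketch (not part of this patch) that posts the documented request parameters and prints the JSON whose nodeId, syncUrl and nodePassword should be stored, as described above. The host, port, and parameter values (external id, node group, database type and version, host name) are assumptions.

// Hypothetical "pull only" registration client.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class RegisterNodeClient {
    public static void main(String[] args) throws Exception {
        // All parameter values below are placeholders for a real client node.
        String form = "externalId=" + URLEncoder.encode("001", "UTF-8")
                + "&nodeGroupId=" + URLEncoder.encode("client", "UTF-8")
                + "&databaseType=" + URLEncoder.encode("H2", "UTF-8")
                + "&databaseVersion=" + URLEncoder.encode("1.4", "UTF-8")
                + "&hostName=" + URLEncoder.encode("client-host", "UTF-8");
        URL url = new URL("http://localhost:31415/api/engine/registernode"); // assumed base URL
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        try (OutputStream out = conn.getOutputStream()) {
            out.write(form.getBytes(StandardCharsets.UTF_8));
        }
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                // Expected shape once registration is open:
                // {"registered":true,"nodeId":"001","syncUrl":"...","nodePassword":"..."}
                System.out.println(line);
            }
        } finally {
            conn.disconnect();
        }
    }
}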
- */ - @ApiOperation(value = "Register the specified node for the single engine") - @RequestMapping(value = "/engine/registernode", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final RegistrationInfo postRegisterNode( - @RequestParam(value = "externalId") String externalId, - @RequestParam(value = "nodeGroupId") String nodeGroupId, - @RequestParam(value = "databaseType") String databaseType, - @RequestParam(value = "databaseVersion") String databaseVersion, - @RequestParam(value = "hostName") String hostName) { - return postRegisterNode(getSymmetricEngine().getEngineName(), externalId, nodeGroupId, - databaseType, databaseVersion, hostName); - } - - @ApiOperation(value = "Register the specified node for the specified engine") - @RequestMapping(value = "/engine/{engine}/registernode", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final RegistrationInfo postRegisterNode(@PathVariable("engine") String engineName, - @RequestParam(value = "externalId") String externalId, - @RequestParam(value = "nodeGroupId") String nodeGroupId, - @RequestParam(value = "databaseType") String databaseType, - @RequestParam(value = "databaseVersion") String databaseVersion, - @RequestParam(value = "hostName") String hostName) { - - ISymmetricEngine engine = getSymmetricEngine(engineName); - IRegistrationService registrationService = engine.getRegistrationService(); - INodeService nodeService = engine.getNodeService(); - RegistrationInfo regInfo = new org.jumpmind.symmetric.web.rest.model.RegistrationInfo(); - - try { - org.jumpmind.symmetric.model.Node processedNode = registrationService - .registerPullOnlyNode(externalId, nodeGroupId, databaseType, databaseVersion); - regInfo.setRegistered(processedNode.isSyncEnabled()); - if (regInfo.isRegistered()) { - regInfo.setNodeId(processedNode.getNodeId()); - NodeSecurity nodeSecurity = nodeService.findNodeSecurity(processedNode.getNodeId()); - regInfo.setNodePassword(nodeSecurity.getNodePassword()); - org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); - regInfo.setSyncUrl(modelNode.getSyncUrl()); - - // do an initial heartbeat - Heartbeat heartbeat = new Heartbeat(); - heartbeat.setNodeId(regInfo.getNodeId()); - heartbeat.setHostName(hostName); - Date now = new Date(); - heartbeat.setCreateTime(now); - heartbeat.setLastRestartTime(now); - heartbeat.setHeartbeatTime(now); - this.heartbeatImpl(engine, heartbeat); - } - - // TODO: Catch a RegistrationRedirectException and redirect. - } catch (IOException e) { - throw new IoException(e); - } - return regInfo; - } - - /** - * Pulls pending batches (data) for a given node. - * - * @param nodeId - * The node id of the node requesting to pull data - * @param securityToken - * The security token or password used to authenticate the pull. - * The security token is provided during the registration - * process. - * @param useJdbcTimestampFormat - * @param useUpsertStatements - * @param useDelimitedIdentifiers - * @param hostName - * The name of the host machine requesting the pull. Only - * required if you have the rest heartbeat on pull paramter set. - * @return {@link PullDataResults} - * - * Example json response is as follows:
- *
- * {"nbrBatches":2,"batches":[{"batchId":20,"sqlStatements":[ - * "insert into table1 (field1, field2) values (value1,value2);" - * ,"update table1 set field1=value1;" - * ]},{"batchId":21,"sqlStatements" - * :["insert into table2 (field1, field2) values (value1,value2);" - * ,"update table2 set field1=value1;"]}]}
- *
- * If there are no batches to be pulled, the json response will look - * as follows:
- *
- * {"nbrBatches":0,"batches":[]} - */ - @ApiOperation(value = "Pull pending batches for the specified node for the single engine") - @RequestMapping(value = "/engine/pulldata", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final PullDataResults getPullData( - @RequestParam(value = WebConstants.NODE_ID) String nodeId, - @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table") - @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, - @RequestParam(value = "useJdbcTimestampFormat", required = false, defaultValue = "true") boolean useJdbcTimestampFormat, - @RequestParam(value = "useUpsertStatements", required = false, defaultValue = "false") boolean useUpsertStatements, - @RequestParam(value = "useDelimitedIdentifiers", required = false, defaultValue = "true") boolean useDelimitedIdentifiers, - @RequestParam(value = "hostName", required = false) String hostName) { - return getPullData(getSymmetricEngine().getEngineName(), nodeId, securityToken, - useJdbcTimestampFormat, useUpsertStatements, useDelimitedIdentifiers, hostName); - } - - @ApiOperation(value = "Pull pending batches for the specified node for the specified engine") - @RequestMapping(value = "/engine/{engine}/pulldata", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final PullDataResults getPullData( - @PathVariable("engine") String engineName, - @RequestParam(value = WebConstants.NODE_ID) String nodeId, - @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") - @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, - @RequestParam(value = "useJdbcTimestampFormat", required = false, defaultValue = "true") boolean useJdbcTimestampFormat, - @RequestParam(value = "useUpsertStatements", required = false, defaultValue = "false") boolean useUpsertStatements, - @RequestParam(value = "useDelimitedIdentifiers", required = false, defaultValue = "true") boolean useDelimitedIdentifiers, - @RequestParam(value = "hostName", required = false) String hostName) { - - ISymmetricEngine engine = getSymmetricEngine(engineName); - - IDataExtractorService dataExtractorService = engine.getDataExtractorService(); - IStatisticManager statisticManager = engine.getStatisticManager(); - INodeService nodeService = engine.getNodeService(); - org.jumpmind.symmetric.model.Node targetNode = nodeService.findNode(nodeId); - - if (securityVerified(nodeId, engine, securityToken)) { - ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey( - nodeService.findIdentityNodeId(), nodeId, ProcessType.REST_PULL_HANLDER)); - try { - - PullDataResults results = new PullDataResults(); - List extractedBatches = dataExtractorService - .extractToPayload(processInfo, targetNode, PayloadType.SQL, - useJdbcTimestampFormat, useUpsertStatements, - useDelimitedIdentifiers); - List batches = new ArrayList(); - for (OutgoingBatchWithPayload outgoingBatchWithPayload : extractedBatches) { - if (outgoingBatchWithPayload.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.LD - || outgoingBatchWithPayload.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.IG) { - Batch batch = new Batch(); - batch.setBatchId(outgoingBatchWithPayload.getBatchId()); - batch.setChannelId(outgoingBatchWithPayload.getChannelId()); - batch.setSqlStatements(outgoingBatchWithPayload.getPayload()); - batches.add(batch); - } - } - 
results.setBatches(batches); - results.setNbrBatches(batches.size()); - processInfo.setStatus(org.jumpmind.symmetric.model.ProcessInfo.Status.OK); - - if (engine.getParameterService().is(ParameterConstants.REST_HEARTBEAT_ON_PULL) - && hostName != null) { - Heartbeat heartbeat = new Heartbeat(); - heartbeat.setNodeId(nodeId); - heartbeat.setHeartbeatTime(new Date()); - heartbeat.setHostName(hostName); - this.heartbeatImpl(engine, heartbeat); - } - return results; - } finally { - if (processInfo.getStatus() != org.jumpmind.symmetric.model.ProcessInfo.Status.OK) { - processInfo.setStatus(org.jumpmind.symmetric.model.ProcessInfo.Status.ERROR); - } - } - } else { - throw new NotAllowedException(); - } - } - - /** - * Sends a heartbeat to the server for the given node. - * - * @param nodeID - * - Required - The client nodeId this to which this heartbeat - * belongs See {@link Heartbeat} for request body requirements - */ - @ApiOperation(value = "Send a heartbeat for the single engine") - @RequestMapping(value = "/engine/heartbeat", method = RequestMethod.PUT) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void putHeartbeat( - @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") - @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, - @RequestBody Heartbeat heartbeat) { - if (securityVerified(heartbeat.getNodeId(), getSymmetricEngine(), securityToken)) { - putHeartbeat(getSymmetricEngine().getEngineName(), securityToken, heartbeat); - } else { - throw new NotAllowedException(); - } - } - - /** - * Sends a heartbeat to the server for the given node. - * - * @param nodeID - * - Required - The client nodeId this to which this heartbeat - * belongs See {@link Heartbeat} for request body requirements - */ - @ApiOperation(value = "Send a heartbeat for the specified engine") - @RequestMapping(value = "/engine/{engine}/heartbeat", method = RequestMethod.PUT) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void putHeartbeat(@PathVariable("engine") String engineName, - @ApiParam(value="This the password for the nodeId being passed in. 
The password is stored in the node_security table.") - @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, - @RequestBody Heartbeat heartbeat) { - - ISymmetricEngine engine = getSymmetricEngine(engineName); - if (securityVerified(heartbeat.getNodeId(), engine, securityToken)) { - heartbeatImpl(engine, heartbeat); - } else { - throw new NotAllowedException(); - } - } - - private void heartbeatImpl(ISymmetricEngine engine, Heartbeat heartbeat) { - INodeService nodeService = engine.getNodeService(); - - NodeHost nodeHost = new NodeHost(); - if (heartbeat.getAvailableProcessors() != null) { - nodeHost.setAvailableProcessors(heartbeat.getAvailableProcessors()); - } - if (heartbeat.getCreateTime() != null) { - nodeHost.setCreateTime(heartbeat.getCreateTime()); - } - if (heartbeat.getFreeMemoryBytes() != null) { - nodeHost.setFreeMemoryBytes(heartbeat.getFreeMemoryBytes()); - } - if (heartbeat.getHeartbeatTime() != null) { - nodeHost.setHeartbeatTime(heartbeat.getHeartbeatTime()); - } - if (heartbeat.getHostName() != null) { - nodeHost.setHostName(heartbeat.getHostName()); - } - if (heartbeat.getIpAddress() != null) { - nodeHost.setIpAddress(heartbeat.getIpAddress()); - } - if (heartbeat.getJavaVendor() != null) { - nodeHost.setJavaVendor(heartbeat.getJavaVendor()); - } - if (heartbeat.getJdbcVersion() != null) { - nodeHost.setJdbcVersion(heartbeat.getJdbcVersion()); - } - if (heartbeat.getJavaVersion() != null) { - nodeHost.setJavaVersion(heartbeat.getJavaVersion()); - } - if (heartbeat.getLastRestartTime() != null) { - nodeHost.setLastRestartTime(heartbeat.getLastRestartTime()); - } - if (heartbeat.getMaxMemoryBytes() != null) { - nodeHost.setMaxMemoryBytes(heartbeat.getMaxMemoryBytes()); - } - if (heartbeat.getNodeId() != null) { - nodeHost.setNodeId(heartbeat.getNodeId()); - } - if (heartbeat.getOsArchitecture() != null) { - nodeHost.setOsArch(heartbeat.getOsArchitecture()); - } - if (heartbeat.getOsName() != null) { - nodeHost.setOsName(heartbeat.getOsName()); - } - if (heartbeat.getOsUser() != null) { - nodeHost.setOsUser(heartbeat.getOsUser()); - } - if (heartbeat.getOsVersion() != null) { - nodeHost.setOsVersion(heartbeat.getOsVersion()); - } - if (heartbeat.getSymmetricVersion() != null) { - nodeHost.setSymmetricVersion(heartbeat.getSymmetricVersion()); - } - if (heartbeat.getTimezoneOffset() != null) { - nodeHost.setTimezoneOffset(heartbeat.getTimezoneOffset()); - } - if (heartbeat.getTotalMemoryBytes() != null) { - nodeHost.setTotalMemoryBytes(heartbeat.getTotalMemoryBytes()); - } - - nodeService.updateNodeHost(nodeHost); - } - - /** - * Acknowledges a set of batches that have been pulled and processed on the - * client side. Setting the status to OK will render the batch complete. - * Setting the status to anything other than OK will queue the batch on the - * server to be sent again on the next pull. if the status is "ER". In error - * status the status description should contain relevant information about - * the error on the client including SQL Error Number and description - */ - @ApiOperation(value = "Acknowledge a set of batches for the single engine") - @RequestMapping(value = "/engine/acknowledgebatch", method = RequestMethod.PUT) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final BatchAckResults putAcknowledgeBatch( - @ApiParam(value="This the password for the nodeId being passed in. 
The password is stored in the node_security table.") - @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, - @RequestBody BatchResults batchResults) { - BatchAckResults results = putAcknowledgeBatch(getSymmetricEngine().getEngineName(), - securityToken, batchResults); - return results; - } - - @ApiOperation(value = "Acknowledge a set of batches for the specified engine") - @RequestMapping(value = "/engine/{engine}/acknowledgebatch", method = RequestMethod.PUT) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final BatchAckResults putAcknowledgeBatch(@PathVariable("engine") String engineName, - @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") - @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, - @RequestBody BatchResults batchResults) { - - BatchAckResults finalResult = new BatchAckResults(); - ISymmetricEngine engine = getSymmetricEngine(engineName); - List results = null; - if (batchResults.getBatchResults().size() > 0) { - if (securityVerified(batchResults.getNodeId(), engine, securityToken)) { - IAcknowledgeService ackService = engine.getAcknowledgeService(); - List batchAcks = convertBatchResultsToAck(batchResults); - results = ackService.ack(batchAcks); - } else { - throw new NotAllowedException(); - } - } - finalResult.setBatchAckResults(results); - return finalResult; - } - - private List convertBatchResultsToAck(BatchResults batchResults) { - BatchAck batchAck = null; - List batchAcks = new ArrayList(); - long transferTimeInMillis = batchResults.getTransferTimeInMillis(); - if (transferTimeInMillis > 0) { - transferTimeInMillis = transferTimeInMillis / batchResults.getBatchResults().size(); - } - for (BatchResult batchResult : batchResults.getBatchResults()) { - batchAck = new BatchAck(batchResult.getBatchId()); - batchAck.setNodeId(batchResults.getNodeId()); - batchAck.setNetworkMillis(transferTimeInMillis); - batchAck.setDatabaseMillis(batchResult.getLoadTimeInMillis()); - if (batchResult.getStatus().equalsIgnoreCase("OK")) { - batchAck.setOk(true); - } else { - batchAck.setOk(false); - batchAck.setSqlCode(batchResult.getSqlCode()); - batchAck.setSqlState(batchResult.getSqlState().substring(0, - Math.min(batchResult.getSqlState().length(), 10))); - batchAck.setSqlMessage(batchResult.getStatusDescription()); - } - batchAcks.add(batchAck); - } - return batchAcks; - } - - /** - * Requests an initial load from the server for the node id provided. The - * initial load requst directs the server to queue up initial load data for - * the client node. Data is obtained for the initial load by the client - * calling the pull method. - * - * @param nodeID - */ - @ApiOperation(value = "Request an initial load for the specified node for the single engine") - @RequestMapping(value = "/engine/requestinitialload", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postRequestInitialLoad(@RequestParam(value = "nodeId") String nodeId) { - postRequestInitialLoad(getSymmetricEngine().getEngineName(), nodeId); - } - - /** - * Requests an initial load from the server for the node id provided. The - * initial load requst directs the server to queue up initial load data for - * the client node. Data is obtained for the initial load by the client - * calling the pull method. 
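Tying the pull and acknowledge endpoints together, the sketch below (not part of this patch) pulls pending SQL batches for a node and then acknowledges them with status OK. The host, port, and batch id are assumptions, the query parameter names are assumed to be the values behind WebConstants.NODE_ID and WebConstants.SECURITY_TOKEN (nodeId and securityToken), and the acknowledgement JSON shape is inferred from the BatchResults and BatchResult model classes referenced in this code.

// Hypothetical "pull only" client loop: pull, apply, acknowledge.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class PullAndAckClient {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:31415/api/engine"; // assumed base URL
        String nodeId = "001";
        String securityToken = "secret"; // node password obtained during registration

        // 1. Pull pending batches as SQL payloads.
        URL pull = new URL(base + "/pulldata?nodeId=" + nodeId + "&securityToken=" + securityToken);
        HttpURLConnection pullConn = (HttpURLConnection) pull.openConnection();
        System.out.println("Pulled: " + read(pullConn));
        // A real client would parse the batches, execute each sqlStatements entry
        // against the local database, and record per-batch success or failure.

        // 2. Acknowledge the batches that were applied (batch id 20 is a placeholder).
        URL ack = new URL(base + "/acknowledgebatch?securityToken=" + securityToken);
        HttpURLConnection ackConn = (HttpURLConnection) ack.openConnection();
        ackConn.setRequestMethod("PUT");
        ackConn.setDoOutput(true);
        ackConn.setRequestProperty("Content-Type", "application/json");
        String body = "{\"nodeId\":\"" + nodeId + "\","
                + "\"batchResults\":[{\"batchId\":20,\"status\":\"OK\"}]}";
        try (OutputStream out = ackConn.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("Ack response: " + read(ackConn));
    }

    private static String read(HttpURLConnection conn) throws Exception {
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            StringBuilder sb = new StringBuilder();
            String line;
            while ((line = in.readLine()) != null) {
                sb.append(line);
            }
            return sb.toString();
        } finally {
            conn.disconnect();
        }
    }
}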
- * - * @param nodeID - */ - @ApiOperation(value = "Request an initial load for the specified node for the specified engine") - @RequestMapping(value = "/engine/{engine}/requestinitialload", method = RequestMethod.POST) - @ResponseStatus(HttpStatus.NO_CONTENT) - @ResponseBody - public final void postRequestInitialLoad(@PathVariable("engine") String engineName, - @RequestParam(value = "nodeId") String nodeId) { - - ISymmetricEngine engine = getSymmetricEngine(engineName); - INodeService nodeService = engine.getNodeService(); - nodeService.setInitialLoadEnabled(nodeId, true, false, -1, "restapi"); - - } - - @ApiOperation(value = "Outgoing summary of batches and data counts waiting for a node") - @RequestMapping(value = "/engine/outgoingBatchSummary", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final BatchSummaries getOutgoingBatchSummary( - @RequestParam(value = WebConstants.NODE_ID) String nodeId, - @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") - @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken) { - return getOutgoingBatchSummary(getSymmetricEngine().getEngineName(), nodeId, securityToken); - } - - @ApiOperation(value = "Outgoing summary of batches and data counts waiting for a node") - @RequestMapping(value = "/engine/{engine}/outgoingBatchSummary", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final BatchSummaries getOutgoingBatchSummary( - @PathVariable("engine") String engineName, - @RequestParam(value = WebConstants.NODE_ID) String nodeId, - @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") - @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken) { - - - ISymmetricEngine engine = getSymmetricEngine(engineName); - - if (securityVerified(nodeId, engine, securityToken)) { - BatchSummaries summaries = new BatchSummaries(); - summaries.setNodeId(nodeId); - - IOutgoingBatchService outgoingBatchService = engine.getOutgoingBatchService(); - List list = outgoingBatchService.findOutgoingBatchSummary( - OutgoingBatch.Status.RQ, OutgoingBatch.Status.QY, OutgoingBatch.Status.NE, - OutgoingBatch.Status.SE, OutgoingBatch.Status.LD, OutgoingBatch.Status.ER); - for (OutgoingBatchSummary sum : list) { - if (sum.getNodeId().equals(nodeId)) { - BatchSummary summary = new BatchSummary(); - summary.setBatchCount(sum.getBatchCount()); - summary.setDataCount(sum.getDataCount()); - summary.setOldestBatchCreateTime(sum.getOldestBatchCreateTime()); - summary.setStatus(sum.getStatus().name()); - summaries.getBatchSummaries().add(summary); - } - } - - return summaries; - } else { - throw new NotAllowedException(); - } - } - - @ApiOperation(value = "Read parameter value") - @RequestMapping(value = "engine/parameter/{name}", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final String getParameter(@PathVariable("name") String name) { - return getParameterImpl(getSymmetricEngine(), name); - } - - @ApiOperation(value = "Read paramater value for the specified engine") - @RequestMapping(value = "engine/{engine}/parameter/{name}", method = RequestMethod.GET) - @ResponseStatus(HttpStatus.OK) - @ResponseBody - public final String getParameter(@PathVariable("engine") String engineName, @PathVariable("name") String name) { - return getParameterImpl(getSymmetricEngine(engineName), name); - } - - private String 
getParameterImpl(ISymmetricEngine service, String name){ - String parameterName = name.replace('_', '.'); - if(parameterName.equals(BasicDataSourcePropertyConstants.DB_POOL_PASSWORD)){ - return ""; - } - return service.getParameterService().getString(parameterName); - } - - @ExceptionHandler(Exception.class) - @ResponseBody - public RestError handleError(Exception ex, HttpServletRequest req) { - int httpErrorCode = 500; - Annotation annotation = ex.getClass().getAnnotation(ResponseStatus.class); - if (annotation != null) { - httpErrorCode = ((ResponseStatus) annotation).value().value(); - } - return new RestError(ex, httpErrorCode); - } - - private void startImpl(ISymmetricEngine engine) { - engine.getParameterService().saveParameter(ParameterConstants.AUTO_START_ENGINE, "true", Constants.SYSTEM_USER); - if (!engine.start()) { - throw new InternalServerErrorException(); - } - } - - private void stopImpl(ISymmetricEngine engine) { - engine.stop(); - engine.getParameterService().saveParameter(ParameterConstants.AUTO_START_ENGINE, "false", Constants.SYSTEM_USER); - - } - - private void syncTriggersImpl(ISymmetricEngine engine, boolean force) { - - ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); - StringBuilder buffer = new StringBuilder(); - triggerRouterService.syncTriggers(buffer, force); - } - - private void syncTriggersByTableImpl(ISymmetricEngine engine, String catalogName, - String schemaName, String tableName, boolean force) { - - ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); - Table table = getSymmetricEngine().getDatabasePlatform().getTableFromCache(catalogName, - schemaName, tableName, true); - if (table == null) { - throw new NotFoundException(); - } - triggerRouterService.syncTriggers(table, force); - } - - private void dropTriggersImpl(ISymmetricEngine engine) { - ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); - triggerRouterService.dropTriggers(); - } - - private void dropTriggersImpl(ISymmetricEngine engine, String tableName) { - ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); - HashSet tables = new HashSet(); - tables.add(tableName); - triggerRouterService.dropTriggers(tables); - } - - private SendSchemaResponse sendSchemaImpl(ISymmetricEngine engine, SendSchemaRequest request) { - - IConfigurationService configurationService = engine.getConfigurationService(); - INodeService nodeService = engine.getNodeService(); - ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); - IDataService dataService = engine.getDataService(); - - SendSchemaResponse response = new SendSchemaResponse(); - - org.jumpmind.symmetric.model.Node identity = nodeService.findIdentity(); - if (identity != null) { - List nodesToSendTo = new ArrayList(); - - List nodeIds = request.getNodeIdsToSendTo(); - if (nodeIds == null || nodeIds.size() == 0) { - nodeIds = new ArrayList(); - String nodeGroupIdToSendTo = request.getNodeGroupIdToSendTo(); - if (isNotBlank(nodeGroupIdToSendTo)) { - NodeGroupLink link = configurationService.getNodeGroupLinkFor( - identity.getNodeGroupId(), nodeGroupIdToSendTo, false); - if (link != null) { - Collection nodes = nodeService - .findEnabledNodesFromNodeGroup(nodeGroupIdToSendTo); - nodesToSendTo.addAll(nodes); - } else { - log.warn("Could not send schema to all nodes in the '" - + nodeGroupIdToSendTo + "' node group. No node group link exists"); - } - } else { - log.warn("Could not send schema to nodes. 
There are none that were provided and the nodeGroupIdToSendTo was also not provided"); - } - } else { - for (String nodeIdToValidate : nodeIds) { - org.jumpmind.symmetric.model.Node node = nodeService.findNode(nodeIdToValidate); - if (node != null) { - NodeGroupLink link = configurationService.getNodeGroupLinkFor( - identity.getNodeGroupId(), node.getNodeGroupId(), false); - if (link != null) { - nodesToSendTo.add(node); - } else { - log.warn("Could not send schema to node '" + nodeIdToValidate - + "'. No node group link exists"); - } - } else { - log.warn("Could not send schema to node '" + nodeIdToValidate - + "'. It was not present in the database"); - } - } - } - - Map> results = response.getNodeIdsSentTo(); - List nodeIdsToSendTo = toNodeIds(nodesToSendTo); - for (String nodeId : nodeIdsToSendTo) { - results.put(nodeId, new ArrayList()); - } - - if (nodesToSendTo.size() > 0) { - List tablesToSend = request.getTablesToSend(); - List triggerRouters = triggerRouterService.getTriggerRouters(false); - for (TriggerRouter triggerRouter : triggerRouters) { - Trigger trigger = triggerRouter.getTrigger(); - NodeGroupLink link = triggerRouter.getRouter().getNodeGroupLink(); - if (link.getSourceNodeGroupId().equals(identity.getNodeGroupId())) { - for (org.jumpmind.symmetric.model.Node node : nodesToSendTo) { - if (link.getTargetNodeGroupId().equals(node.getNodeGroupId())) { - if (tablesToSend == null || tablesToSend.size() == 0 - || contains(trigger, tablesToSend)) { - dataService.sendSchema(node.getNodeId(), - trigger.getSourceCatalogName(), - trigger.getSourceSchemaName(), - trigger.getSourceTableName(), false); - results.get(node.getNodeId()).add( - new TableName(trigger.getSourceCatalogName(), trigger - .getSourceSchemaName(), trigger - .getSourceTableName())); - } - } - } - } - } - } - } - return response; - } - - private boolean contains(Trigger trigger, List tables) { - for (TableName tableName : tables) { - if (trigger.getFullyQualifiedSourceTableName().equals( - Table.getFullyQualifiedTableName(tableName.getCatalogName(), - tableName.getSchemaName(), tableName.getTableName()))) { - return true; - } - } - return false; - } - - private List toNodeIds(List nodes) { - List nodeIds = new ArrayList(nodes.size()); - for (org.jumpmind.symmetric.model.Node node : nodes) { - nodeIds.add(node.getNodeId()); - } - return nodeIds; - } - - private void uninstallImpl(ISymmetricEngine engine) { - getSymmetricEngineHolder().uninstallEngine(engine); - } - - private void reinitializeImpl(ISymmetricEngine engine) { - INodeService nodeService = engine.getNodeService(); - org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); - - if (!this.isRootNode(engine, modelNode)) { - engine.uninstall(); - } - - engine.start(); - } - - private void clearCacheImpl(ISymmetricEngine engine) { - engine.clearCaches(); - } - - private void loadProfileImpl(ISymmetricEngine engine, MultipartFile file) { - - IDataLoaderService dataLoaderService = engine.getDataLoaderService(); - boolean inError = false; - try { - String content = new String(file.getBytes()); - List batches = dataLoaderService.loadDataBatch(content); - for (IncomingBatch batch : batches) { - if (batch.getStatus() == Status.ER) { - inError = true; - } - } - } catch (Exception e) { - inError = true; - } - if (inError) { - throw new InternalServerErrorException(); - } - } - - private NodeList childrenImpl(ISymmetricEngine engine) { - NodeList children = new NodeList(); - Node xmlChildNode = null; - - INodeService nodeService = 
engine.getNodeService(); - org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); - - if (isRegistered(engine)) { - if (isRootNode(engine, modelNode)) { - NetworkedNode networkedNode = nodeService.getRootNetworkedNode(); - Set childNetwork = networkedNode.getChildren(); - if (childNetwork != null) { - for (NetworkedNode child : childNetwork) { - - List nodeHosts = nodeService.findNodeHosts(child.getNode() - .getNodeId()); - NodeSecurity nodeSecurity = nodeService.findNodeSecurity(child.getNode() - .getNodeId()); - - xmlChildNode = new Node(); - xmlChildNode.setNodeId(child.getNode().getNodeId()); - xmlChildNode.setExternalId(child.getNode().getExternalId()); - xmlChildNode.setRegistrationServer(false); - xmlChildNode.setSyncUrl(child.getNode().getSyncUrl()); - - xmlChildNode.setBatchInErrorCount(child.getNode().getBatchInErrorCount()); - xmlChildNode.setBatchToSendCount(child.getNode().getBatchToSendCount()); - if (nodeHosts.size() > 0) { - xmlChildNode.setLastHeartbeat(nodeHosts.get(0).getHeartbeatTime()); - } - xmlChildNode.setRegistered(nodeSecurity.hasRegistered()); - xmlChildNode.setInitialLoaded(nodeSecurity.hasInitialLoaded()); - xmlChildNode - .setReverseInitialLoaded(nodeSecurity.hasReverseInitialLoaded()); - if (child.getNode().getCreatedAtNodeId() == null) { - xmlChildNode.setRegistrationServer(true); - } - children.addNode(xmlChildNode); - } - } - } - } else { - throw new NotFoundException(); - } - return children; - } - - private Node nodeImpl(ISymmetricEngine engine) { - - Node xmlNode = new Node(); - if (isRegistered(engine)) { - INodeService nodeService = engine.getNodeService(); - org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); - List nodeHosts = nodeService.findNodeHosts(modelNode.getNodeId()); - NodeSecurity nodeSecurity = nodeService.findNodeSecurity(modelNode.getNodeId()); - xmlNode.setNodeId(modelNode.getNodeId()); - xmlNode.setExternalId(modelNode.getExternalId()); - xmlNode.setSyncUrl(modelNode.getSyncUrl()); - xmlNode.setRegistrationUrl(engine.getParameterService().getRegistrationUrl()); - xmlNode.setBatchInErrorCount(modelNode.getBatchInErrorCount()); - xmlNode.setBatchToSendCount(modelNode.getBatchToSendCount()); - if (nodeHosts.size() > 0) { - xmlNode.setLastHeartbeat(nodeHosts.get(0).getHeartbeatTime()); - } - xmlNode.setHeartbeatInterval(engine.getParameterService().getInt( - ParameterConstants.HEARTBEAT_JOB_PERIOD_MS)); - xmlNode.setRegistered(nodeSecurity.hasRegistered()); - xmlNode.setInitialLoaded(nodeSecurity.hasInitialLoaded()); - xmlNode.setReverseInitialLoaded(nodeSecurity.hasReverseInitialLoaded()); - if (modelNode.getCreatedAtNodeId() == null) { - xmlNode.setRegistrationServer(true); - } else { - xmlNode.setRegistrationServer(false); - } - xmlNode.setCreatedAtNodeId(modelNode.getCreatedAtNodeId()); - } else { - throw new NotFoundException(); - } - return xmlNode; - } - - private boolean isRootNode(ISymmetricEngine engine, org.jumpmind.symmetric.model.Node node) { - INodeService nodeService = engine.getNodeService(); - org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); - if (modelNode.getCreatedAtNodeId() == null - || modelNode.getCreatedAtNodeId().equalsIgnoreCase(modelNode.getExternalId())) { - return true; - } else { - return false; - } - } - - private boolean isRegistered(ISymmetricEngine engine) { - boolean registered = true; - INodeService nodeService = engine.getNodeService(); - org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); - if (modelNode == null) { 
- registered = false; - } else { - NodeSecurity nodeSecurity = nodeService.findNodeSecurity(modelNode.getNodeId()); - if (nodeSecurity == null) { - registered = false; - } - } - return registered; - } - - private NodeStatus nodeStatusImpl(ISymmetricEngine engine) { - - NodeStatus status = new NodeStatus(); - if (isRegistered(engine)) { - INodeService nodeService = engine.getNodeService(); - org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); - NodeSecurity nodeSecurity = nodeService.findNodeSecurity(modelNode.getNodeId()); - List nodeHost = nodeService.findNodeHosts(modelNode.getNodeId()); - status.setStarted(engine.isStarted()); - status.setRegistered(nodeSecurity.getRegistrationTime() != null); - status.setInitialLoaded(nodeSecurity.getInitialLoadTime() != null); - status.setReverseInitialLoaded(nodeSecurity.getRevInitialLoadTime() != null); - status.setNodeId(modelNode.getNodeId()); - status.setNodeGroupId(modelNode.getNodeGroupId()); - status.setExternalId(modelNode.getExternalId()); - status.setSyncUrl(modelNode.getSyncUrl()); - status.setRegistrationUrl(engine.getParameterService().getRegistrationUrl()); - status.setDatabaseType(modelNode.getDatabaseType()); - status.setDatabaseVersion(modelNode.getDatabaseVersion()); - status.setSyncEnabled(modelNode.isSyncEnabled()); - status.setCreatedAtNodeId(modelNode.getCreatedAtNodeId()); - status.setBatchToSendCount(engine.getOutgoingBatchService() - .countOutgoingBatchesUnsent()); - status.setBatchInErrorCount(engine.getOutgoingBatchService() - .countOutgoingBatchesInError()); - status.setDeploymentType(modelNode.getDeploymentType()); - if (modelNode.getCreatedAtNodeId() == null) { - status.setRegistrationServer(true); - } else { - status.setRegistrationServer(false); - } - if (nodeHost != null && nodeHost.size() > 0) { - status.setLastHeartbeat(nodeHost.get(0).getHeartbeatTime()); - } - status.setHeartbeatInterval(engine.getParameterService().getInt( - ParameterConstants.HEARTBEAT_SYNC_ON_PUSH_PERIOD_SEC)); - if (status.getHeartbeatInterval() == 0) { - status.setHeartbeatInterval(600); - } - } else { - throw new NotFoundException(); - } - return status; - } - - private Set channelStatusImpl(ISymmetricEngine engine) { - HashSet channelStatus = new HashSet(); - List channels = engine.getConfigurationService().getNodeChannels(false); - for (NodeChannel nodeChannel : channels) { - String channelId = nodeChannel.getChannelId(); - ChannelStatus status = new ChannelStatus(); - status.setChannelId(channelId); - int outgoingInError = engine.getOutgoingBatchService().countOutgoingBatchesInError( - channelId); - int incomingInError = engine.getIncomingBatchService().countIncomingBatchesInError( - channelId); - status.setBatchInErrorCount(outgoingInError); - status.setBatchToSendCount(engine.getOutgoingBatchService().countOutgoingBatchesUnsent( - channelId)); - status.setIncomingError(incomingInError > 0); - status.setOutgoingError(outgoingInError > 0); - status.setEnabled(nodeChannel.isEnabled()); - status.setIgnoreEnabled(nodeChannel.isIgnoreEnabled()); - status.setSuspendEnabled(nodeChannel.isSuspendEnabled()); - channelStatus.add(status); - } - return channelStatus; - } - - private QueryResults queryNodeImpl(ISymmetricEngine engine, String sql, boolean isQuery) { - - QueryResults results = new QueryResults(); - org.jumpmind.symmetric.web.rest.model.Row xmlRow = null; - org.jumpmind.symmetric.web.rest.model.Column xmlColumn = null; - - ISqlTemplate sqlTemplate = engine.getSqlTemplate(); - try { - if(!isQuery){ - int updates = 
sqlTemplate.update(sql); - results.setNbrResults(updates); - return results; - } - - List rows = sqlTemplate.query(sql); - int nbrRows = 0; - for (Row row : rows) { - xmlRow = new org.jumpmind.symmetric.web.rest.model.Row(); - Iterator> itr = row.entrySet().iterator(); - int columnOrdinal = 0; - while (itr.hasNext()) { - xmlColumn = new org.jumpmind.symmetric.web.rest.model.Column(); - xmlColumn.setOrdinal(++columnOrdinal); - Map.Entry pair = (Map.Entry) itr.next(); - xmlColumn.setName(pair.getKey()); - if (pair.getValue() != null) { - xmlColumn.setValue(pair.getValue().toString()); - } - xmlRow.getColumnData().add(xmlColumn); - } - xmlRow.setRowNum(++nbrRows); - results.getResults().add(xmlRow); - } - results.setNbrResults(nbrRows); - } catch (Exception ex) { - log.error("Exception while executing sql.", ex); - throw new NotAllowedException("Error while executing sql %s. Error is %s", sql, ex - .getCause().getMessage()); - } - return results; - } - - protected SymmetricEngineHolder getSymmetricEngineHolder() { - SymmetricEngineHolder holder = (SymmetricEngineHolder) context - .getAttribute(WebConstants.ATTR_ENGINE_HOLDER); - if (holder == null) { - throw new NotFoundException(); - } - return holder; - } - - protected ISymmetricEngine getSymmetricEngine(String engineName) { - SymmetricEngineHolder holder = getSymmetricEngineHolder(); - - ISymmetricEngine engine = null; - if (StringUtils.isNotBlank(engineName)) { - engine = holder.getEngines().get(engineName); - } - if (engine == null) { - throw new NotFoundException(); - } else if (!engine.getParameterService().is(ParameterConstants.REST_API_ENABLED)) { - throw new NotAllowedException("The REST API was not enabled for %s", - engine.getEngineName()); - } else { - MDC.put("engineName", engine.getEngineName()); - return engine; - } - } - - protected boolean securityVerified(String nodeId, ISymmetricEngine engine, String securityToken) { - - INodeService nodeService = engine.getNodeService(); - boolean allowed = false; - org.jumpmind.symmetric.model.Node targetNode = nodeService.findNode(nodeId); - if (targetNode != null) { - NodeSecurity security = nodeService.findNodeSecurity(nodeId); - allowed = security.getNodePassword().equals(securityToken); - } - return allowed; - } - - protected ISymmetricEngine getSymmetricEngine() { - ISymmetricEngine engine = null; - SymmetricEngineHolder holder = getSymmetricEngineHolder(); - - if (holder.getEngines().size() > 0) { - engine = holder.getEngines().values().iterator().next(); - } - - if (engine == null) { - throw new NotAllowedException(); - } else if (!engine.getParameterService().is(ParameterConstants.REST_API_ENABLED)) { - throw new NotAllowedException("The REST API was not enabled for %s", - engine.getEngineName()); - } else { - return engine; - } - - } - -} +/** + * Licensed to JumpMind Inc under one or more contributor + * license agreements. See the NOTICE file distributed + * with this work for additional information regarding + * copyright ownership. JumpMind Inc licenses this file + * to you under the GNU General Public License, version 3.0 (GPLv3) + * (the "License"); you may not use this file except in compliance + * with the License. + * + * You should have received a copy of the GNU General Public License, + * version 3.0 (GPLv3) along with this library; if not, see + * . 
+ * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.jumpmind.symmetric.web.rest; + +import static org.apache.commons.lang.StringUtils.isNotBlank; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.lang.annotation.Annotation; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; + +import javax.servlet.ServletContext; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.MDC; +import org.jumpmind.db.model.Table; +import org.jumpmind.db.sql.ISqlTemplate; +import org.jumpmind.db.sql.Row; +import org.jumpmind.db.util.BasicDataSourcePropertyConstants; +import org.jumpmind.exception.IoException; +import org.jumpmind.symmetric.ISymmetricEngine; +import org.jumpmind.symmetric.common.Constants; +import org.jumpmind.symmetric.common.ParameterConstants; +import org.jumpmind.symmetric.io.data.writer.StructureDataWriter.PayloadType; +import org.jumpmind.symmetric.job.IJob; +import org.jumpmind.symmetric.job.IJobManager; +import org.jumpmind.symmetric.model.BatchAck; +import org.jumpmind.symmetric.model.BatchAckResult; +import org.jumpmind.symmetric.model.IncomingBatch; +import org.jumpmind.symmetric.model.AbstractBatch.Status; +import org.jumpmind.symmetric.model.NetworkedNode; +import org.jumpmind.symmetric.model.NodeChannel; +import org.jumpmind.symmetric.model.NodeGroupLink; +import org.jumpmind.symmetric.model.NodeHost; +import org.jumpmind.symmetric.model.NodeSecurity; +import org.jumpmind.symmetric.model.OutgoingBatch; +import org.jumpmind.symmetric.model.OutgoingBatchSummary; +import org.jumpmind.symmetric.model.OutgoingBatchWithPayload; +import org.jumpmind.symmetric.model.ProcessInfo; +import org.jumpmind.symmetric.model.ProcessInfoKey; +import org.jumpmind.symmetric.model.ProcessInfoKey.ProcessType; +import org.jumpmind.symmetric.model.Trigger; +import org.jumpmind.symmetric.model.TriggerRouter; +import org.jumpmind.symmetric.service.IAcknowledgeService; +import org.jumpmind.symmetric.service.IConfigurationService; +import org.jumpmind.symmetric.service.IDataExtractorService; +import org.jumpmind.symmetric.service.IDataLoaderService; +import org.jumpmind.symmetric.service.IDataService; +import org.jumpmind.symmetric.service.INodeService; +import org.jumpmind.symmetric.service.IOutgoingBatchService; +import org.jumpmind.symmetric.service.IRegistrationService; +import org.jumpmind.symmetric.service.ITriggerRouterService; +import org.jumpmind.symmetric.statistic.IStatisticManager; +import org.jumpmind.symmetric.web.ServerSymmetricEngine; +import org.jumpmind.symmetric.web.SymmetricEngineHolder; +import org.jumpmind.symmetric.web.WebConstants; +import org.jumpmind.symmetric.web.rest.model.Batch; +import org.jumpmind.symmetric.web.rest.model.BatchAckResults; +import org.jumpmind.symmetric.web.rest.model.BatchResult; +import org.jumpmind.symmetric.web.rest.model.BatchResults; +import 
org.jumpmind.symmetric.web.rest.model.BatchSummaries; +import org.jumpmind.symmetric.web.rest.model.BatchSummary; +import org.jumpmind.symmetric.web.rest.model.ChannelStatus; +import org.jumpmind.symmetric.web.rest.model.Engine; +import org.jumpmind.symmetric.web.rest.model.EngineList; +import org.jumpmind.symmetric.web.rest.model.Heartbeat; +import org.jumpmind.symmetric.web.rest.model.Node; +import org.jumpmind.symmetric.web.rest.model.NodeList; +import org.jumpmind.symmetric.web.rest.model.NodeStatus; +import org.jumpmind.symmetric.web.rest.model.PullDataResults; +import org.jumpmind.symmetric.web.rest.model.QueryResults; +import org.jumpmind.symmetric.web.rest.model.RegistrationInfo; +import org.jumpmind.symmetric.web.rest.model.RestError; +import org.jumpmind.symmetric.web.rest.model.SendSchemaRequest; +import org.jumpmind.symmetric.web.rest.model.SendSchemaResponse; +import org.jumpmind.symmetric.web.rest.model.TableName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.stereotype.Controller; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.multipart.MultipartFile; + +import com.wordnik.swagger.annotations.ApiOperation; +import com.wordnik.swagger.annotations.ApiParam; + +/** + * This is a REST API for SymmetricDS. The API will be active only if + * rest.api.enable=true. The property is turned off by default. The REST API is + * available at http://hostname:port/api for the stand alone SymmetricDS + * installation. + * + *

+ * <p>
+ * General HTTP Responses to the methods:
+ * <ul>
+ * <li>ALL Methods may return the following HTTP responses.
+ * In general:
+ * <ul>
+ * <li>HTTP 2xx - Success</li>
+ * <li>HTTP 4xx - Problem on the caller (client) side</li>
+ * <li>HTTP 5xx - Problem on the REST service side</li>
+ * </ul>
+ * ALL Methods:
+ * <ul>
+ * <li>HTTP 401 - Unauthorized. You have not successfully authenticated.
+ * Authentication details are in the response body.</li>
+ * <li>HTTP 404 - Not Found. You attempted to perform an operation on a resource
+ * that doesn't exist, i.e. you tried to start or stop an engine that doesn't
+ * exist.</li>
+ * <li>HTTP 405 - Method Not Allowed, i.e. you attempted a service call that
+ * uses the default engine (/engine/identity vs engine/{engine}/identity) and
+ * there was more than one engine found on the server.</li>
+ * <li>HTTP 500 - Internal Server Error. Something went wrong on the server /
+ * service, and we couldn't fulfill the request. Details are in the response
+ * body.</li>
+ * </ul>
+ * </li>
+ * <li>GET Methods
+ * <ul>
+ * <li>HTTP 200 - Success with the result contained in the response body.</li>
+ * <li>HTTP 204 - Success with no results. Your GET request completed
+ * successfully, but found no matching entities.</li>
+ * </ul>
+ * </li>
+ * </ul>
+ *
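+ * <p>
+ * As an illustration only (not part of this patch), a caller might branch on
+ * these status code families as in the following sketch; the host and port are
+ * placeholders:
+ * {@code
+ * import java.io.IOException;
+ * import java.net.HttpURLConnection;
+ * import java.net.URL;
+ *
+ * public class RestResponseCheck {
+ *     public static void main(String[] args) throws IOException {
+ *         // Placeholder host/port; the REST API is served under /api
+ *         URL url = new URL("http://localhost:31415/api/enginelist");
+ *         HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ *         conn.setRequestMethod("GET");
+ *         int code = conn.getResponseCode();
+ *         if (code >= 200 && code < 300) {
+ *             System.out.println("Success: " + code);
+ *         } else if (code >= 400 && code < 500) {
+ *             System.out.println("Problem on the caller (client) side: " + code);
+ *         } else if (code >= 500) {
+ *             System.out.println("Problem on the REST service side: " + code);
+ *         }
+ *         conn.disconnect();
+ *     }
+ * }
+ * }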

+ */ +@Controller +public class RestService { + + protected final Logger log = LoggerFactory.getLogger(getClass()); + + @Autowired + ServletContext context; + + /** + * Provides a list of {@link Engine} that are configured on the node. + * + * @return {@link EngineList} - Engines configured on the node
+ * + *
+     * Example xml response is as follows:

+ * {@code + * + * + * RootSugarDB-root + * + * + * } + *
+ * Example json response is as follows:

+ * {"engines":[{"name":"RootSugarDB-root"}]} + *
+ */ + @ApiOperation(value = "Obtain a list of configured Engines") + @RequestMapping(value = "/enginelist", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final EngineList getEngineList() { + EngineList list = new EngineList(); + Collection engines = getSymmetricEngineHolder().getEngines() + .values(); + for (ISymmetricEngine engine : engines) { + if (engine.getParameterService().is(ParameterConstants.REST_API_ENABLED)) { + list.addEngine(new Engine(engine.getEngineName())); + } + } + return list; + } + + /** + * Provides Node information for the single engine + * + * return {@link Node}
+ * + *
+     * Example xml response is as follows:

+ * {@code + * + * 0 + * 0 + * server01 + * true + * 2012-12-20T09:26:02-05:00 + * server01 + * true + * true + * false + * http://machine-name:31415/sync/RootSugarDB-root + * + * } + *
+ * Example json response is as follows:

+ * {"name":"server01","externalId":"server01","registrationServer":true,"syncUrl":"http://machine-name:31415/sync/RootSugarDB-root","batchToSendCount":0,"batchInErrorCount":0,"lastHeartbeat":1356013562000,"registered":true,"initialLoaded":true,"reverseInitialLoaded":false} + *
+ */ + @ApiOperation(value = "Obtain node information for the single engine") + @RequestMapping(value = "engine/node", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final Node getNode() { + return nodeImpl(getSymmetricEngine()); + } + + /** + * Provides Node information for the specified engine + */ + @ApiOperation(value = "Obtain node information for he specified engine") + @RequestMapping(value = "engine/{engine}/node", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final Node getNode(@PathVariable("engine") String engineName) { + return nodeImpl(getSymmetricEngine(engineName)); + } + + /** + * Provides a list of children that are registered with this engine. + * + * return {@link Node}
+ * + *
+     * Example xml response is as follows:

+ * {@code + * + * + * 0 + * 0 + * client01 + * true + * client01 + * true + * false + * false + * http://machine-name:31418/sync/ClientSugarDB-client01 + * + * + * } + *
+ * Example json response is as follows:

+ * {"nodes":[{"name":"client01","externalId":"client01","registrationServer":false,"syncUrl":"http://gwilmer-laptop:31418/sync/ClientSugarDB-client01","batchToSendCount":0,"batchInErrorCount":0,"lastHeartbeat":null,"registered":true,"initialLoaded":true,"reverseInitialLoaded":false}]} + *
+ */ + @ApiOperation(value = "Obtain list of children for the single engine") + @RequestMapping(value = "engine/children", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final NodeList getChildren() { + return childrenImpl(getSymmetricEngine()); + } + + /** + * Provides a list of children {@link Node} that are registered with this + * engine. + */ + @ApiOperation(value = "Obtain list of children for the specified engine") + @RequestMapping(value = "engine/{engine}/children", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final NodeList getChildrenByEngine(@PathVariable("engine") String engineName) { + return childrenImpl(getSymmetricEngine(engineName)); + } + + /** + * Takes a snapshot for this engine and streams it to the client. The result + * of this call is a stream that should be written to a zip file. The zip + * contains configuration and operational information about the installation + * and can be used to diagnose state of the node + */ + @ApiOperation(value = "Take a diagnostic snapshot for the single engine") + @RequestMapping(value = "engine/snapshot", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final void getSnapshot(HttpServletResponse resp) { + getSnapshot(getSymmetricEngine().getEngineName(), resp); + } + + /** + * Executes a select statement on the node and returns results.
+ * Example json response is as follows:
+ *
+ * {"nbrResults":1,"results":[{"rowNum":1,"columnData":[{"ordinal":1,"name": + * "node_id","value":"root"}]}]} + * + */ + @ApiOperation(value = "Execute the specified SQL statement on the single engine") + @RequestMapping(value = "engine/querynode", method = {RequestMethod.GET, RequestMethod.POST}) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final QueryResults getQueryNode(@RequestParam(value = "query") String sql, @RequestParam(value = "isquery", defaultValue = "true") boolean isQuery) { + return queryNodeImpl(getSymmetricEngine(), sql, isQuery); + } + + /** + * Executes a select statement on the node and returns results. + */ + @ApiOperation(value = "Execute the specified SQL statement for the specified engine") + @RequestMapping(value = "engine/{engine}/querynode", method = {RequestMethod.GET, RequestMethod.POST}) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final QueryResults getQueryNode(@PathVariable("engine") String engineName, + @RequestParam(value = "query") String sql, @RequestParam(value = "isquery", defaultValue = "true") boolean isQuery) { + return queryNodeImpl(getSymmetricEngine(engineName), sql, isQuery); + } + + /** + * Execute the named job. This can be used to control when jobs are run via and external application. You would typically + * disable the job first so it no longer runs automatically. + */ + @ApiOperation(value = "Execute the named job. This can be used to control when jobs are run via and external application. " + + "You would typically disable the job first so it no longer runs automatically. Jobs you might want to control include: " + + "job.route, job.push, job.pull, job.offline.push, job.offline.pull") + @RequestMapping(value = "engine/{engine}/invokejob", method = {RequestMethod.GET, RequestMethod.POST}) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public boolean invokeJob(@PathVariable("engine") String engineName, @RequestParam("jobname") String jobName) { + IJobManager jobManager = getSymmetricEngine(engineName).getJobManager(); + IJob job = jobManager.getJob(jobName); + if (job == null) { + log.warn("Could not find a job with the name '{}' in the '{}' engine", jobName, engineName); + return false; + } else if (!job.isRunning()) { + log.info("Invoking '{}' via the REST API", jobName); + return job.invoke(true); + } else { + log.info("Could not invoke the '{}' job via the REST API because it is already running", jobName); + return false; + } + } + + /** + * Takes a snapshot for the specified engine and streams it to the client. + */ + @ApiOperation(value = "Take a diagnostic snapshot for the specified engine") + @RequestMapping(value = "engine/{engine}/snapshot", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final void getSnapshot(@PathVariable("engine") String engineName, + HttpServletResponse resp) { + BufferedInputStream bis = null; + try { + ISymmetricEngine engine = getSymmetricEngine(engineName); + File file = engine.snapshot(); + resp.setHeader("Content-Disposition", + String.format("attachment; filename=%s", file.getName())); + bis = new BufferedInputStream(new FileInputStream(file)); + IOUtils.copy(bis, resp.getOutputStream()); + } catch (IOException e) { + throw new IoException(e); + } finally { + IOUtils.closeQuietly(bis); + } + } + + /** + * Loads a configuration profile for the single engine on the node. + * + * @param file + * A file stream that contains the profile itself. 
+ */ + @ApiOperation(value = "Load a configuration file to the single engine") + @RequestMapping(value = "engine/profile", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postProfile(@RequestParam MultipartFile file) { + loadProfileImpl(getSymmetricEngine(), file); + } + + /** + * Loads a configuration profile for the specified engine on the node. + * + * @param file + * A file stream that contains the profile itself. + */ + @ApiOperation(value = "Load a configuration file to the specified engine") + @RequestMapping(value = "engine/{engine}/profile", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postProfileByEngine(@PathVariable("engine") String engineName, + @RequestParam(value = "file") MultipartFile file) { + + loadProfileImpl(getSymmetricEngine(engineName), file); + } + + /** + * Starts the single engine on the node + */ + @ApiOperation(value = "Start the single engine") + @RequestMapping(value = "engine/start", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postStart() { + startImpl(getSymmetricEngine()); + } + + /** + * Starts the specified engine on the node + */ + @ApiOperation(value = "Start the specified engine") + @RequestMapping(value = "engine/{engine}/start", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postStartByEngine(@PathVariable("engine") String engineName) { + startImpl(getSymmetricEngine(engineName)); + } + + /** + * Stops the single engine on the node + */ + @ApiOperation(value = "Stop the single engine") + @RequestMapping(value = "engine/stop", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postStop() { + stopImpl(getSymmetricEngine()); + } + + /** + * Stops the specified engine on the node + */ + @ApiOperation(value = "Stop the specified engine") + @RequestMapping(value = "engine/{engine}/stop", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postStopByEngine(@PathVariable("engine") String engineName) { + stopImpl(getSymmetricEngine(engineName)); + } + + /** + * Creates instances of triggers for each entry configured table/trigger for + * the single engine on the node + */ + @ApiOperation(value = "Sync triggers on the single engine") + @RequestMapping(value = "engine/synctriggers", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postSyncTriggers( + @RequestParam(required = false, value = "force") boolean force) { + syncTriggersImpl(getSymmetricEngine(), force); + } + + /** + * Creates instances of triggers for each entry configured table/trigger for + * the specified engine on the node + */ + @ApiOperation(value = "Sync triggers on the specified engine") + @RequestMapping(value = "engine/{engine}/synctriggers", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postSyncTriggersByEngine(@PathVariable("engine") String engineName, + @RequestParam(required = false, value = "force") boolean force) { + syncTriggersImpl(getSymmetricEngine(engineName), force); + } + + @ApiOperation(value = "Sync triggers on the single engine for a table") + @RequestMapping(value = "engine/synctriggers/{table}", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void 
postSyncTriggersByTable(@PathVariable("table") String tableName, + @RequestParam(required = false, value = "catalog") String catalogName, + @RequestParam(required = false, value = "schema") String schemaName, + @RequestParam(required = false, value = "force") boolean force) { + syncTriggersByTableImpl(getSymmetricEngine(), catalogName, schemaName, tableName, force); + } + + @ApiOperation(value = "Sync triggers on the specific engine for a table") + @RequestMapping(value = "engine/{engine}/synctriggers/{table}", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postSyncTriggersByTable(@PathVariable("engine") String engineName, + @PathVariable("table") String tableName, + @RequestParam(required = false, value = "catalog") String catalogName, + @RequestParam(required = false, value = "schema") String schemaName, + @RequestParam(required = false, value = "force") boolean force) { + syncTriggersByTableImpl(getSymmetricEngine(engineName), catalogName, schemaName, tableName, + force); + } + + /** + * Send schema updates for all tables or a list of tables to a list of nodes + * or to all nodes in a group. + *

+ * Example json request to send all tables to all nodes in a group:
+ * { "nodeGroupIdToSendTo": "target_group_name" } + *

+ * Example json request to send all tables to a list of nodes:
+ * { "nodeIdsToSendTo": [ "1", "2" ] } + *

+ * Example json request to send a table to a list of nodes:
+ * { "nodeIdsToSendTo": ["1", "2"], "tablesToSend": [ { "catalogName": "", "schemaName": "", "tableName": "A" } ] } + *

+ * Example json response: + * { "nodeIdsSentTo": { "1": [ { "catalogName": null, "schemaName": null, "tableName": "A" } ] } } + * + * @param engineName + * @param request + * @return {@link SendSchemaResponse} + */ + @ApiOperation(value = "Send schema updates for all tables or a list of tables to a list of nodes or to all nodes in a group.") + @RequestMapping(value = "engine/{engine}/sendschema", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final SendSchemaResponse postSendSchema(@PathVariable("engine") String engineName, + @RequestBody SendSchemaRequest request) { + return sendSchemaImpl(getSymmetricEngine(engineName), request); + } + + /** + * Send schema updates for all tables or a list of tables to a list of nodes + * or to all nodes in a group. See + * {@link RestService#postSendSchema(String, SendSchemaRequest)} for + * additional details. + * + * @param request + * @return {@link SendSchemaResponse} + */ + @ApiOperation(value = "Send schema updates for all tables or a list of tables to a list of nodes or to all nodes in a group.") + @RequestMapping(value = "engine/sendschema", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final SendSchemaResponse postSendSchema(@RequestBody SendSchemaRequest request) { + return sendSchemaImpl(getSymmetricEngine(), request); + } + + /** + * Removes instances of triggers for each entry configured table/trigger for + * the single engine on the node + */ + @ApiOperation(value = "Drop triggers on the single engine") + @RequestMapping(value = "engine/droptriggers", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postDropTriggers() { + dropTriggersImpl(getSymmetricEngine()); + } + + /** + * Removes instances of triggers for each entry configured table/trigger for + * the specified engine on the node + */ + @ApiOperation(value = "Drop triggers on the specified engine") + @RequestMapping(value = "engine/{engine}/droptriggers", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postDropTriggersByEngine(@PathVariable("engine") String engineName) { + dropTriggersImpl(getSymmetricEngine(engineName)); + } + + /** + * Removes instances of triggers for the specified table for the single + * engine on the node + */ + @ApiOperation(value = "Drop triggers for the specified table on the single engine") + @RequestMapping(value = "engine/table/{table}/droptriggers", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postDropTriggersByTable(@PathVariable("table") String tableName) { + dropTriggersImpl(getSymmetricEngine(), tableName); + } + + /** + * Removes instances of triggers for the specified table for the single + * engine on the node + * + */ + @ApiOperation(value = "Drop triggers for the specified table on the specified engine") + @RequestMapping(value = "engine/{engine}/table/{table}/droptriggers", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postDropTriggersByEngineByTable(@PathVariable("engine") String engineName, + @PathVariable("table") String tableName) { + dropTriggersImpl(getSymmetricEngine(engineName), tableName); + } + + /** + * Installs and starts a new node + * + * @param file + * A file stream that contains the node's properties. 
+ */ + @ApiOperation(value = "Load a configuration file to the single engine") + @RequestMapping(value = "engine/install", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postInstall(@RequestParam MultipartFile file) { + try { + Properties properties = new Properties(); + properties.load(file.getInputStream()); + getSymmetricEngineHolder().install(properties); + } catch (RuntimeException ex) { + throw ex; + } catch (Exception ex) { + throw new RuntimeException(ex); + } + } + + /** + * Uninstalls all SymmetricDS objects from the given node (database) for the + * single engine on the node + */ + @ApiOperation(value = "Uninstall SymmetricDS on the single engine") + @RequestMapping(value = "engine/uninstall", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postUninstall() { + uninstallImpl(getSymmetricEngine()); + } + + /** + * Uninstalls all SymmetricDS objects from the given node (database) for the + * specified engine on the node + * + */ + @ApiOperation(value = "Uninstall SymmetricDS on the specified engine") + @RequestMapping(value = "engine/{engine}/uninstall", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postUninstallByEngine(@PathVariable("engine") String engineName) { + uninstallImpl(getSymmetricEngine(engineName)); + } + + /** + * Reinitializes the given node (database) for the single engine on the node + */ + @ApiOperation(value = "Reinitiailize SymmetricDS on the single engine") + @RequestMapping(value = "engine/reinitialize", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postReinitialize() { + reinitializeImpl(getSymmetricEngine()); + } + + /** + * Reinitializes the given node (database) for the specified engine on the + * node + * + */ + @ApiOperation(value = "Reinitiailize SymmetricDS on the specified engine") + @RequestMapping(value = "engine/{engine}/reinitialize", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postReinitializeByEngine(@PathVariable("engine") String engineName) { + reinitializeImpl(getSymmetricEngine(engineName)); + } + + /** + * Refreshes cache for the single engine on the node + */ + @ApiOperation(value = "Refresh caches on the single engine") + @RequestMapping(value = "engine/refreshcache", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postClearCaches() { + clearCacheImpl(getSymmetricEngine()); + } + + /** + * Refreshes cache for the specified engine on the node node + * + */ + @ApiOperation(value = "Refresh caches on the specified engine") + @RequestMapping(value = "engine/{engine}/refreshcache", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postClearCachesByEngine(@PathVariable("engine") String engineName) { + clearCacheImpl(getSymmetricEngine(engineName)); + } + + /** + * Returns an overall status for the single engine of the node. + * + * @return {@link NodeStatus} + * + *

+     * Example xml response is as follows:

+ * {@code + * + * 0 + * 0 + * Microsoft SQL Server + * 9.0 + * professional + * root + * true + * 2012-11-17 14:52:19.267 + * RootSugarDB + * root + * true + * false + * true + * 3.1.10 + * true + * http://my-machine-name:31415/sync/RootSugarDB-root + * + * } + *
+ * Example json response is as follows:

+ * {"started":true,"registered":true,"registrationServer":false,"initialLoaded":true, + * "nodeId":"root","nodeGroupId":"RootSugarDB","externalId":"root", + * "syncUrl":"http://my-machine-name:31415/sync/RootSugarDB-root","databaseType":"Microsoft SQL Server", + * "databaseVersion":"9.0","syncEnabled":true,"createdAtNodeId":null,"batchToSendCount":0, + * "batchInErrorCount":0,"deploymentType":"professional","symmetricVersion":"3.1.10", + * "lastHeartbeat":"2012-11-17 15:15:00.033","hearbeatInterval":null} + *
+ */ + @ApiOperation(value = "Obtain the status of the single engine") + @RequestMapping(value = "/engine/status", method = RequestMethod.GET) + @ResponseBody + public final NodeStatus getStatus() { + return nodeStatusImpl(getSymmetricEngine()); + } + + /** + * Returns an overall status for the specified engine of the node. + * + * @return {@link NodeStatus} + */ + @ApiOperation(value = "Obtain the status of the specified engine") + @RequestMapping(value = "/engine/{engine}/status", method = RequestMethod.GET) + @ResponseBody + public final NodeStatus getStatusByEngine(@PathVariable("engine") String engineName) { + return nodeStatusImpl(getSymmetricEngine(engineName)); + } + + /** + * Returns status of each channel for the single engine of the node. + * + * @return Set<{@link ChannelStatus}> + */ + @ApiOperation(value = "Obtain the channel status of the single engine") + @RequestMapping(value = "/engine/channelstatus", method = RequestMethod.GET) + @ResponseBody + public final Set getChannelStatus() { + return channelStatusImpl(getSymmetricEngine()); + } + + /** + * Returns status of each channel for the specified engine of the node. + * + * @return Set<{@link ChannelStatus}> + */ + @ApiOperation(value = "Obtain the channel status of the specified engine") + @RequestMapping(value = "/engine/{engine}/channelstatus", method = RequestMethod.GET) + @ResponseBody + public final Set getChannelStatusByEngine( + @PathVariable("engine") String engineName) { + return channelStatusImpl(getSymmetricEngine(engineName)); + } + + /** + * Removes (unregisters and cleans up) a node for the single engine + */ + @ApiOperation(value = "Remove specified node (unregister and clean up) for the single engine") + @RequestMapping(value = "/engine/removenode", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postRemoveNode(@RequestParam(value = "nodeId") String nodeId) { + postRemoveNodeByEngine(nodeId, getSymmetricEngine().getEngineName()); + } + + /** + * Removes (unregisters and cleans up) a node for the single engine + */ + @ApiOperation(value = "Remove specified node (unregister and clean up) for the specified engine") + @RequestMapping(value = "/engine/{engine}/removenode", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postRemoveNodeByEngine(@RequestParam(value = "nodeId") String nodeId, + @PathVariable("engine") String engineName) { + getSymmetricEngine(engineName).removeAndCleanupNode(nodeId); + } + + /** + * Requests the server to add this node to the synchronization scenario as a + * "pull only" node + * + * @param externalId + * The external id for this node + * @param nodeGroup + * The node group to which this node belongs + * @param databaseType + * The database type for this node + * @param databaseVersion + * The database version for this node + * @param hostName + * The host name of the machine on which the client is running + * @return {@link RegistrationInfo} + * + *
+     * Example json response is as follows:

+ * {"registered":false,"nodeId":null,"syncUrl":null,"nodePassword":null}
+ * In the above example, the node attempted to register, but was not able to successfully register + * because registration was not open on the server. Checking the "registered" element will allow you + * to determine whether the node was successfully registered.

+ * The following example shows the results from the registration after registration has been opened + * on the server for the given node.

+ * {"registered":true,"nodeId":"001","syncUrl":"http://myserverhost:31415/sync/server-000","nodePassword":"1880fbffd2bc2d00e1d58bd0c734ff"}
+ * The nodeId, syncUrl and nodePassword should be stored for subsequent calls to the REST API. + *
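+ * <p>
+ * Illustrative registration sketch (not part of this patch; the host, port and
+ * parameter values are placeholders, and java.net.URL, java.net.HttpURLConnection,
+ * java.io.OutputStream and java.nio.charset.StandardCharsets imports are assumed):
+ * {@code
+ * URL url = new URL("http://localhost:31415/api/engine/registernode"); // placeholder
+ * HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ * conn.setRequestMethod("POST");
+ * conn.setDoOutput(true);
+ * conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
+ * String form = "externalId=client01&nodeGroupId=client&databaseType=H2"
+ *         + "&databaseVersion=1.4&hostName=client-host"; // placeholder values
+ * try (OutputStream out = conn.getOutputStream()) {
+ *     out.write(form.getBytes(StandardCharsets.UTF_8));
+ * }
+ * // The JSON response (see examples above) carries registered, nodeId,
+ * // syncUrl and nodePassword; persist nodeId and nodePassword when
+ * // registered is true, since later calls require the security token.
+ * System.out.println(conn.getResponseCode());
+ * }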
+ */ + @ApiOperation(value = "Register the specified node for the single engine") + @RequestMapping(value = "/engine/registernode", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final RegistrationInfo postRegisterNode( + @RequestParam(value = "externalId") String externalId, + @RequestParam(value = "nodeGroupId") String nodeGroupId, + @RequestParam(value = "databaseType") String databaseType, + @RequestParam(value = "databaseVersion") String databaseVersion, + @RequestParam(value = "hostName") String hostName) { + return postRegisterNode(getSymmetricEngine().getEngineName(), externalId, nodeGroupId, + databaseType, databaseVersion, hostName); + } + + @ApiOperation(value = "Register the specified node for the specified engine") + @RequestMapping(value = "/engine/{engine}/registernode", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final RegistrationInfo postRegisterNode(@PathVariable("engine") String engineName, + @RequestParam(value = "externalId") String externalId, + @RequestParam(value = "nodeGroupId") String nodeGroupId, + @RequestParam(value = "databaseType") String databaseType, + @RequestParam(value = "databaseVersion") String databaseVersion, + @RequestParam(value = "hostName") String hostName) { + + ISymmetricEngine engine = getSymmetricEngine(engineName); + IRegistrationService registrationService = engine.getRegistrationService(); + INodeService nodeService = engine.getNodeService(); + RegistrationInfo regInfo = new org.jumpmind.symmetric.web.rest.model.RegistrationInfo(); + + try { + org.jumpmind.symmetric.model.Node processedNode = registrationService + .registerPullOnlyNode(externalId, nodeGroupId, databaseType, databaseVersion); + regInfo.setRegistered(processedNode.isSyncEnabled()); + if (regInfo.isRegistered()) { + regInfo.setNodeId(processedNode.getNodeId()); + NodeSecurity nodeSecurity = nodeService.findNodeSecurity(processedNode.getNodeId()); + regInfo.setNodePassword(nodeSecurity.getNodePassword()); + org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); + regInfo.setSyncUrl(modelNode.getSyncUrl()); + + // do an initial heartbeat + Heartbeat heartbeat = new Heartbeat(); + heartbeat.setNodeId(regInfo.getNodeId()); + heartbeat.setHostName(hostName); + Date now = new Date(); + heartbeat.setCreateTime(now); + heartbeat.setLastRestartTime(now); + heartbeat.setHeartbeatTime(now); + this.heartbeatImpl(engine, heartbeat); + } + + // TODO: Catch a RegistrationRedirectException and redirect. + } catch (IOException e) { + throw new IoException(e); + } + return regInfo; + } + + /** + * Pulls pending batches (data) for a given node. + * + * @param nodeId + * The node id of the node requesting to pull data + * @param securityToken + * The security token or password used to authenticate the pull. + * The security token is provided during the registration + * process. + * @param useJdbcTimestampFormat + * @param useUpsertStatements + * @param useDelimitedIdentifiers + * @param hostName + * The name of the host machine requesting the pull. Only + * required if you have the rest heartbeat on pull paramter set. + * @return {@link PullDataResults} + * + * Example json response is as follows:
+ *
+ * {"nbrBatches":2,"batches":[{"batchId":20,"sqlStatements":[ + * "insert into table1 (field1, field2) values (value1,value2);" + * ,"update table1 set field1=value1;" + * ]},{"batchId":21,"sqlStatements" + * :["insert into table2 (field1, field2) values (value1,value2);" + * ,"update table2 set field1=value1;"]}]}
+ *
+ * If there are no batches to be pulled, the json response will look + * as follows:
+ *
+ * {"nbrBatches":0,"batches":[]} + */ + @ApiOperation(value = "Pull pending batches for the specified node for the single engine") + @RequestMapping(value = "/engine/pulldata", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final PullDataResults getPullData( + @RequestParam(value = WebConstants.NODE_ID) String nodeId, + @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table") + @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, + @RequestParam(value = "useJdbcTimestampFormat", required = false, defaultValue = "true") boolean useJdbcTimestampFormat, + @RequestParam(value = "useUpsertStatements", required = false, defaultValue = "false") boolean useUpsertStatements, + @RequestParam(value = "useDelimitedIdentifiers", required = false, defaultValue = "true") boolean useDelimitedIdentifiers, + @RequestParam(value = "hostName", required = false) String hostName) { + return getPullData(getSymmetricEngine().getEngineName(), nodeId, securityToken, + useJdbcTimestampFormat, useUpsertStatements, useDelimitedIdentifiers, hostName); + } + + @ApiOperation(value = "Pull pending batches for the specified node for the specified engine") + @RequestMapping(value = "/engine/{engine}/pulldata", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final PullDataResults getPullData( + @PathVariable("engine") String engineName, + @RequestParam(value = WebConstants.NODE_ID) String nodeId, + @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") + @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, + @RequestParam(value = "useJdbcTimestampFormat", required = false, defaultValue = "true") boolean useJdbcTimestampFormat, + @RequestParam(value = "useUpsertStatements", required = false, defaultValue = "false") boolean useUpsertStatements, + @RequestParam(value = "useDelimitedIdentifiers", required = false, defaultValue = "true") boolean useDelimitedIdentifiers, + @RequestParam(value = "hostName", required = false) String hostName) { + + ISymmetricEngine engine = getSymmetricEngine(engineName); + + IDataExtractorService dataExtractorService = engine.getDataExtractorService(); + IStatisticManager statisticManager = engine.getStatisticManager(); + INodeService nodeService = engine.getNodeService(); + org.jumpmind.symmetric.model.Node targetNode = nodeService.findNode(nodeId); + + if (securityVerified(nodeId, engine, securityToken)) { + ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey( + nodeService.findIdentityNodeId(), nodeId, ProcessType.REST_PULL_HANLDER)); + try { + + PullDataResults results = new PullDataResults(); + List extractedBatches = dataExtractorService + .extractToPayload(processInfo, targetNode, PayloadType.SQL, + useJdbcTimestampFormat, useUpsertStatements, + useDelimitedIdentifiers); + List batches = new ArrayList(); + for (OutgoingBatchWithPayload outgoingBatchWithPayload : extractedBatches) { + if (outgoingBatchWithPayload.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.LD + || outgoingBatchWithPayload.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.IG) { + Batch batch = new Batch(); + batch.setBatchId(outgoingBatchWithPayload.getBatchId()); + batch.setChannelId(outgoingBatchWithPayload.getChannelId()); + batch.setSqlStatements(outgoingBatchWithPayload.getPayload()); + batches.add(batch); + } + } + 
results.setBatches(batches); + results.setNbrBatches(batches.size()); + processInfo.setStatus(org.jumpmind.symmetric.model.ProcessInfo.Status.OK); + + if (engine.getParameterService().is(ParameterConstants.REST_HEARTBEAT_ON_PULL) + && hostName != null) { + Heartbeat heartbeat = new Heartbeat(); + heartbeat.setNodeId(nodeId); + heartbeat.setHeartbeatTime(new Date()); + heartbeat.setHostName(hostName); + this.heartbeatImpl(engine, heartbeat); + } + return results; + } finally { + if (processInfo.getStatus() != org.jumpmind.symmetric.model.ProcessInfo.Status.OK) { + processInfo.setStatus(org.jumpmind.symmetric.model.ProcessInfo.Status.ERROR); + } + } + } else { + throw new NotAllowedException(); + } + } + + /** + * Sends a heartbeat to the server for the given node. + * + * @param nodeID + * - Required - The client nodeId this to which this heartbeat + * belongs See {@link Heartbeat} for request body requirements + */ + @ApiOperation(value = "Send a heartbeat for the single engine") + @RequestMapping(value = "/engine/heartbeat", method = RequestMethod.PUT) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void putHeartbeat( + @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") + @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, + @RequestBody Heartbeat heartbeat) { + if (securityVerified(heartbeat.getNodeId(), getSymmetricEngine(), securityToken)) { + putHeartbeat(getSymmetricEngine().getEngineName(), securityToken, heartbeat); + } else { + throw new NotAllowedException(); + } + } + + /** + * Sends a heartbeat to the server for the given node. + * + * @param nodeID + * - Required - The client nodeId this to which this heartbeat + * belongs See {@link Heartbeat} for request body requirements + */ + @ApiOperation(value = "Send a heartbeat for the specified engine") + @RequestMapping(value = "/engine/{engine}/heartbeat", method = RequestMethod.PUT) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void putHeartbeat(@PathVariable("engine") String engineName, + @ApiParam(value="This the password for the nodeId being passed in. 
The password is stored in the node_security table.") + @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, + @RequestBody Heartbeat heartbeat) { + + ISymmetricEngine engine = getSymmetricEngine(engineName); + if (securityVerified(heartbeat.getNodeId(), engine, securityToken)) { + heartbeatImpl(engine, heartbeat); + } else { + throw new NotAllowedException(); + } + } + + private void heartbeatImpl(ISymmetricEngine engine, Heartbeat heartbeat) { + INodeService nodeService = engine.getNodeService(); + + NodeHost nodeHost = new NodeHost(); + if (heartbeat.getAvailableProcessors() != null) { + nodeHost.setAvailableProcessors(heartbeat.getAvailableProcessors()); + } + if (heartbeat.getCreateTime() != null) { + nodeHost.setCreateTime(heartbeat.getCreateTime()); + } + if (heartbeat.getFreeMemoryBytes() != null) { + nodeHost.setFreeMemoryBytes(heartbeat.getFreeMemoryBytes()); + } + if (heartbeat.getHeartbeatTime() != null) { + nodeHost.setHeartbeatTime(heartbeat.getHeartbeatTime()); + } + if (heartbeat.getHostName() != null) { + nodeHost.setHostName(heartbeat.getHostName()); + } + if (heartbeat.getIpAddress() != null) { + nodeHost.setIpAddress(heartbeat.getIpAddress()); + } + if (heartbeat.getJavaVendor() != null) { + nodeHost.setJavaVendor(heartbeat.getJavaVendor()); + } + if (heartbeat.getJdbcVersion() != null) { + nodeHost.setJdbcVersion(heartbeat.getJdbcVersion()); + } + if (heartbeat.getJavaVersion() != null) { + nodeHost.setJavaVersion(heartbeat.getJavaVersion()); + } + if (heartbeat.getLastRestartTime() != null) { + nodeHost.setLastRestartTime(heartbeat.getLastRestartTime()); + } + if (heartbeat.getMaxMemoryBytes() != null) { + nodeHost.setMaxMemoryBytes(heartbeat.getMaxMemoryBytes()); + } + if (heartbeat.getNodeId() != null) { + nodeHost.setNodeId(heartbeat.getNodeId()); + } + if (heartbeat.getOsArchitecture() != null) { + nodeHost.setOsArch(heartbeat.getOsArchitecture()); + } + if (heartbeat.getOsName() != null) { + nodeHost.setOsName(heartbeat.getOsName()); + } + if (heartbeat.getOsUser() != null) { + nodeHost.setOsUser(heartbeat.getOsUser()); + } + if (heartbeat.getOsVersion() != null) { + nodeHost.setOsVersion(heartbeat.getOsVersion()); + } + if (heartbeat.getSymmetricVersion() != null) { + nodeHost.setSymmetricVersion(heartbeat.getSymmetricVersion()); + } + if (heartbeat.getTimezoneOffset() != null) { + nodeHost.setTimezoneOffset(heartbeat.getTimezoneOffset()); + } + if (heartbeat.getTotalMemoryBytes() != null) { + nodeHost.setTotalMemoryBytes(heartbeat.getTotalMemoryBytes()); + } + + nodeService.updateNodeHost(nodeHost); + } + + /** + * Acknowledges a set of batches that have been pulled and processed on the + * client side. Setting the status to OK will render the batch complete. + * Setting the status to anything other than OK will queue the batch on the + * server to be sent again on the next pull. if the status is "ER". In error + * status the status description should contain relevant information about + * the error on the client including SQL Error Number and description + */ + @ApiOperation(value = "Acknowledge a set of batches for the single engine") + @RequestMapping(value = "/engine/acknowledgebatch", method = RequestMethod.PUT) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final BatchAckResults putAcknowledgeBatch( + @ApiParam(value="This the password for the nodeId being passed in. 
The password is stored in the node_security table.") + @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, + @RequestBody BatchResults batchResults) { + BatchAckResults results = putAcknowledgeBatch(getSymmetricEngine().getEngineName(), + securityToken, batchResults); + return results; + } + + @ApiOperation(value = "Acknowledge a set of batches for the specified engine") + @RequestMapping(value = "/engine/{engine}/acknowledgebatch", method = RequestMethod.PUT) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final BatchAckResults putAcknowledgeBatch(@PathVariable("engine") String engineName, + @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") + @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken, + @RequestBody BatchResults batchResults) { + + BatchAckResults finalResult = new BatchAckResults(); + ISymmetricEngine engine = getSymmetricEngine(engineName); + List results = null; + if (batchResults.getBatchResults().size() > 0) { + if (securityVerified(batchResults.getNodeId(), engine, securityToken)) { + IAcknowledgeService ackService = engine.getAcknowledgeService(); + List batchAcks = convertBatchResultsToAck(batchResults); + results = ackService.ack(batchAcks); + } else { + throw new NotAllowedException(); + } + } + finalResult.setBatchAckResults(results); + return finalResult; + } + + private List convertBatchResultsToAck(BatchResults batchResults) { + BatchAck batchAck = null; + List batchAcks = new ArrayList(); + long transferTimeInMillis = batchResults.getTransferTimeInMillis(); + if (transferTimeInMillis > 0) { + transferTimeInMillis = transferTimeInMillis / batchResults.getBatchResults().size(); + } + for (BatchResult batchResult : batchResults.getBatchResults()) { + batchAck = new BatchAck(batchResult.getBatchId()); + batchAck.setNodeId(batchResults.getNodeId()); + batchAck.setNetworkMillis(transferTimeInMillis); + batchAck.setLoadMillis(batchResult.getLoadTimeInMillis()); + if (batchResult.getStatus().equalsIgnoreCase("OK")) { + batchAck.setOk(true); + } else { + batchAck.setOk(false); + batchAck.setSqlCode(batchResult.getSqlCode()); + batchAck.setSqlState(batchResult.getSqlState().substring(0, + Math.min(batchResult.getSqlState().length(), 10))); + batchAck.setSqlMessage(batchResult.getStatusDescription()); + } + batchAcks.add(batchAck); + } + return batchAcks; + } + + /** + * Requests an initial load from the server for the node id provided. The + * initial load requst directs the server to queue up initial load data for + * the client node. Data is obtained for the initial load by the client + * calling the pull method. + * + * @param nodeID + */ + @ApiOperation(value = "Request an initial load for the specified node for the single engine") + @RequestMapping(value = "/engine/requestinitialload", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postRequestInitialLoad(@RequestParam(value = "nodeId") String nodeId) { + postRequestInitialLoad(getSymmetricEngine().getEngineName(), nodeId); + } + + /** + * Requests an initial load from the server for the node id provided. The + * initial load requst directs the server to queue up initial load data for + * the client node. Data is obtained for the initial load by the client + * calling the pull method. 
+ * + * @param nodeID + */ + @ApiOperation(value = "Request an initial load for the specified node for the specified engine") + @RequestMapping(value = "/engine/{engine}/requestinitialload", method = RequestMethod.POST) + @ResponseStatus(HttpStatus.NO_CONTENT) + @ResponseBody + public final void postRequestInitialLoad(@PathVariable("engine") String engineName, + @RequestParam(value = "nodeId") String nodeId) { + + ISymmetricEngine engine = getSymmetricEngine(engineName); + INodeService nodeService = engine.getNodeService(); + nodeService.setInitialLoadEnabled(nodeId, true, false, -1, "restapi"); + + } + + @ApiOperation(value = "Outgoing summary of batches and data counts waiting for a node") + @RequestMapping(value = "/engine/outgoingBatchSummary", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final BatchSummaries getOutgoingBatchSummary( + @RequestParam(value = WebConstants.NODE_ID) String nodeId, + @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") + @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken) { + return getOutgoingBatchSummary(getSymmetricEngine().getEngineName(), nodeId, securityToken); + } + + @ApiOperation(value = "Outgoing summary of batches and data counts waiting for a node") + @RequestMapping(value = "/engine/{engine}/outgoingBatchSummary", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final BatchSummaries getOutgoingBatchSummary( + @PathVariable("engine") String engineName, + @RequestParam(value = WebConstants.NODE_ID) String nodeId, + @ApiParam(value="This the password for the nodeId being passed in. The password is stored in the node_security table.") + @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken) { + + + ISymmetricEngine engine = getSymmetricEngine(engineName); + + if (securityVerified(nodeId, engine, securityToken)) { + BatchSummaries summaries = new BatchSummaries(); + summaries.setNodeId(nodeId); + + IOutgoingBatchService outgoingBatchService = engine.getOutgoingBatchService(); + List list = outgoingBatchService.findOutgoingBatchSummary( + OutgoingBatch.Status.RQ, OutgoingBatch.Status.QY, OutgoingBatch.Status.NE, + OutgoingBatch.Status.SE, OutgoingBatch.Status.LD, OutgoingBatch.Status.ER); + for (OutgoingBatchSummary sum : list) { + if (sum.getNodeId().equals(nodeId)) { + BatchSummary summary = new BatchSummary(); + summary.setBatchCount(sum.getBatchCount()); + summary.setDataCount(sum.getDataCount()); + summary.setOldestBatchCreateTime(sum.getOldestBatchCreateTime()); + summary.setStatus(sum.getStatus().name()); + summaries.getBatchSummaries().add(summary); + } + } + + return summaries; + } else { + throw new NotAllowedException(); + } + } + + @ApiOperation(value = "Read parameter value") + @RequestMapping(value = "engine/parameter/{name}", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final String getParameter(@PathVariable("name") String name) { + return getParameterImpl(getSymmetricEngine(), name); + } + + @ApiOperation(value = "Read paramater value for the specified engine") + @RequestMapping(value = "engine/{engine}/parameter/{name}", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + @ResponseBody + public final String getParameter(@PathVariable("engine") String engineName, @PathVariable("name") String name) { + return getParameterImpl(getSymmetricEngine(engineName), name); + } + + private String 
getParameterImpl(ISymmetricEngine service, String name){ + String parameterName = name.replace('_', '.'); + if(parameterName.equals(BasicDataSourcePropertyConstants.DB_POOL_PASSWORD)){ + return ""; + } + return service.getParameterService().getString(parameterName); + } + + @ExceptionHandler(Exception.class) + @ResponseBody + public RestError handleError(Exception ex, HttpServletRequest req) { + int httpErrorCode = 500; + Annotation annotation = ex.getClass().getAnnotation(ResponseStatus.class); + if (annotation != null) { + httpErrorCode = ((ResponseStatus) annotation).value().value(); + } + return new RestError(ex, httpErrorCode); + } + + private void startImpl(ISymmetricEngine engine) { + engine.getParameterService().saveParameter(ParameterConstants.AUTO_START_ENGINE, "true", Constants.SYSTEM_USER); + if (!engine.start()) { + throw new InternalServerErrorException(); + } + } + + private void stopImpl(ISymmetricEngine engine) { + engine.stop(); + engine.getParameterService().saveParameter(ParameterConstants.AUTO_START_ENGINE, "false", Constants.SYSTEM_USER); + + } + + private void syncTriggersImpl(ISymmetricEngine engine, boolean force) { + + ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); + StringBuilder buffer = new StringBuilder(); + triggerRouterService.syncTriggers(buffer, force); + } + + private void syncTriggersByTableImpl(ISymmetricEngine engine, String catalogName, + String schemaName, String tableName, boolean force) { + + ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); + Table table = getSymmetricEngine().getDatabasePlatform().getTableFromCache(catalogName, + schemaName, tableName, true); + if (table == null) { + throw new NotFoundException(); + } + triggerRouterService.syncTriggers(table, force); + } + + private void dropTriggersImpl(ISymmetricEngine engine) { + ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); + triggerRouterService.dropTriggers(); + } + + private void dropTriggersImpl(ISymmetricEngine engine, String tableName) { + ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); + HashSet tables = new HashSet(); + tables.add(tableName); + triggerRouterService.dropTriggers(tables); + } + + private SendSchemaResponse sendSchemaImpl(ISymmetricEngine engine, SendSchemaRequest request) { + + IConfigurationService configurationService = engine.getConfigurationService(); + INodeService nodeService = engine.getNodeService(); + ITriggerRouterService triggerRouterService = engine.getTriggerRouterService(); + IDataService dataService = engine.getDataService(); + + SendSchemaResponse response = new SendSchemaResponse(); + + org.jumpmind.symmetric.model.Node identity = nodeService.findIdentity(); + if (identity != null) { + List nodesToSendTo = new ArrayList(); + + List nodeIds = request.getNodeIdsToSendTo(); + if (nodeIds == null || nodeIds.size() == 0) { + nodeIds = new ArrayList(); + String nodeGroupIdToSendTo = request.getNodeGroupIdToSendTo(); + if (isNotBlank(nodeGroupIdToSendTo)) { + NodeGroupLink link = configurationService.getNodeGroupLinkFor( + identity.getNodeGroupId(), nodeGroupIdToSendTo, false); + if (link != null) { + Collection nodes = nodeService + .findEnabledNodesFromNodeGroup(nodeGroupIdToSendTo); + nodesToSendTo.addAll(nodes); + } else { + log.warn("Could not send schema to all nodes in the '" + + nodeGroupIdToSendTo + "' node group. No node group link exists"); + } + } else { + log.warn("Could not send schema to nodes. 
There are none that were provided and the nodeGroupIdToSendTo was also not provided"); + } + } else { + for (String nodeIdToValidate : nodeIds) { + org.jumpmind.symmetric.model.Node node = nodeService.findNode(nodeIdToValidate); + if (node != null) { + NodeGroupLink link = configurationService.getNodeGroupLinkFor( + identity.getNodeGroupId(), node.getNodeGroupId(), false); + if (link != null) { + nodesToSendTo.add(node); + } else { + log.warn("Could not send schema to node '" + nodeIdToValidate + + "'. No node group link exists"); + } + } else { + log.warn("Could not send schema to node '" + nodeIdToValidate + + "'. It was not present in the database"); + } + } + } + + Map> results = response.getNodeIdsSentTo(); + List nodeIdsToSendTo = toNodeIds(nodesToSendTo); + for (String nodeId : nodeIdsToSendTo) { + results.put(nodeId, new ArrayList()); + } + + if (nodesToSendTo.size() > 0) { + List tablesToSend = request.getTablesToSend(); + List triggerRouters = triggerRouterService.getTriggerRouters(false); + for (TriggerRouter triggerRouter : triggerRouters) { + Trigger trigger = triggerRouter.getTrigger(); + NodeGroupLink link = triggerRouter.getRouter().getNodeGroupLink(); + if (link.getSourceNodeGroupId().equals(identity.getNodeGroupId())) { + for (org.jumpmind.symmetric.model.Node node : nodesToSendTo) { + if (link.getTargetNodeGroupId().equals(node.getNodeGroupId())) { + if (tablesToSend == null || tablesToSend.size() == 0 + || contains(trigger, tablesToSend)) { + dataService.sendSchema(node.getNodeId(), + trigger.getSourceCatalogName(), + trigger.getSourceSchemaName(), + trigger.getSourceTableName(), false); + results.get(node.getNodeId()).add( + new TableName(trigger.getSourceCatalogName(), trigger + .getSourceSchemaName(), trigger + .getSourceTableName())); + } + } + } + } + } + } + } + return response; + } + + private boolean contains(Trigger trigger, List tables) { + for (TableName tableName : tables) { + if (trigger.getFullyQualifiedSourceTableName().equals( + Table.getFullyQualifiedTableName(tableName.getCatalogName(), + tableName.getSchemaName(), tableName.getTableName()))) { + return true; + } + } + return false; + } + + private List toNodeIds(List nodes) { + List nodeIds = new ArrayList(nodes.size()); + for (org.jumpmind.symmetric.model.Node node : nodes) { + nodeIds.add(node.getNodeId()); + } + return nodeIds; + } + + private void uninstallImpl(ISymmetricEngine engine) { + getSymmetricEngineHolder().uninstallEngine(engine); + } + + private void reinitializeImpl(ISymmetricEngine engine) { + INodeService nodeService = engine.getNodeService(); + org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity(); + + if (!this.isRootNode(engine, modelNode)) { + engine.uninstall(); + } + + engine.start(); + } + + private void clearCacheImpl(ISymmetricEngine engine) { + engine.clearCaches(); + } + + private void loadProfileImpl(ISymmetricEngine engine, MultipartFile file) { + + IDataLoaderService dataLoaderService = engine.getDataLoaderService(); + boolean inError = false; + try { + String content = new String(file.getBytes()); + List batches = dataLoaderService.loadDataBatch(content); + for (IncomingBatch batch : batches) { + if (batch.getStatus() == Status.ER) { + inError = true; + } + } + } catch (Exception e) { + inError = true; + } + if (inError) { + throw new InternalServerErrorException(); + } + } + + private NodeList childrenImpl(ISymmetricEngine engine) { + NodeList children = new NodeList(); + Node xmlChildNode = null; + + INodeService nodeService = 
+    private NodeList childrenImpl(ISymmetricEngine engine) {
+        NodeList children = new NodeList();
+        Node xmlChildNode = null;
+
+        INodeService nodeService = engine.getNodeService();
+        org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity();
+
+        if (isRegistered(engine)) {
+            if (isRootNode(engine, modelNode)) {
+                NetworkedNode networkedNode = nodeService.getRootNetworkedNode();
+                Set<NetworkedNode> childNetwork = networkedNode.getChildren();
+                if (childNetwork != null) {
+                    for (NetworkedNode child : childNetwork) {
+
+                        List<NodeHost> nodeHosts = nodeService.findNodeHosts(child.getNode()
+                                .getNodeId());
+                        NodeSecurity nodeSecurity = nodeService.findNodeSecurity(child.getNode()
+                                .getNodeId());
+
+                        xmlChildNode = new Node();
+                        xmlChildNode.setNodeId(child.getNode().getNodeId());
+                        xmlChildNode.setExternalId(child.getNode().getExternalId());
+                        xmlChildNode.setRegistrationServer(false);
+                        xmlChildNode.setSyncUrl(child.getNode().getSyncUrl());
+
+                        xmlChildNode.setBatchInErrorCount(child.getNode().getBatchInErrorCount());
+                        xmlChildNode.setBatchToSendCount(child.getNode().getBatchToSendCount());
+                        if (nodeHosts.size() > 0) {
+                            xmlChildNode.setLastHeartbeat(nodeHosts.get(0).getHeartbeatTime());
+                        }
+                        xmlChildNode.setRegistered(nodeSecurity.hasRegistered());
+                        xmlChildNode.setInitialLoaded(nodeSecurity.hasInitialLoaded());
+                        xmlChildNode
+                                .setReverseInitialLoaded(nodeSecurity.hasReverseInitialLoaded());
+                        if (child.getNode().getCreatedAtNodeId() == null) {
+                            xmlChildNode.setRegistrationServer(true);
+                        }
+                        children.addNode(xmlChildNode);
+                    }
+                }
+            }
+        } else {
+            throw new NotFoundException();
+        }
+        return children;
+    }
+
+    private Node nodeImpl(ISymmetricEngine engine) {
+
+        Node xmlNode = new Node();
+        if (isRegistered(engine)) {
+            INodeService nodeService = engine.getNodeService();
+            org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity();
+            List<NodeHost> nodeHosts = nodeService.findNodeHosts(modelNode.getNodeId());
+            NodeSecurity nodeSecurity = nodeService.findNodeSecurity(modelNode.getNodeId());
+            xmlNode.setNodeId(modelNode.getNodeId());
+            xmlNode.setExternalId(modelNode.getExternalId());
+            xmlNode.setSyncUrl(modelNode.getSyncUrl());
+            xmlNode.setRegistrationUrl(engine.getParameterService().getRegistrationUrl());
+            xmlNode.setBatchInErrorCount(modelNode.getBatchInErrorCount());
+            xmlNode.setBatchToSendCount(modelNode.getBatchToSendCount());
+            if (nodeHosts.size() > 0) {
+                xmlNode.setLastHeartbeat(nodeHosts.get(0).getHeartbeatTime());
+            }
+            xmlNode.setHeartbeatInterval(engine.getParameterService().getInt(
+                    ParameterConstants.HEARTBEAT_JOB_PERIOD_MS));
+            xmlNode.setRegistered(nodeSecurity.hasRegistered());
+            xmlNode.setInitialLoaded(nodeSecurity.hasInitialLoaded());
+            xmlNode.setReverseInitialLoaded(nodeSecurity.hasReverseInitialLoaded());
+            if (modelNode.getCreatedAtNodeId() == null) {
+                xmlNode.setRegistrationServer(true);
+            } else {
+                xmlNode.setRegistrationServer(false);
+            }
+            xmlNode.setCreatedAtNodeId(modelNode.getCreatedAtNodeId());
+        } else {
+            throw new NotFoundException();
+        }
+        return xmlNode;
+    }
+
+    private boolean isRootNode(ISymmetricEngine engine, org.jumpmind.symmetric.model.Node node) {
+        INodeService nodeService = engine.getNodeService();
+        org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity();
+        if (modelNode.getCreatedAtNodeId() == null
+                || modelNode.getCreatedAtNodeId().equalsIgnoreCase(modelNode.getExternalId())) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+
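+    /**
+     * A node is considered registered once it has both an identity row and a matching node
+     * security row.
+     */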
+    private boolean isRegistered(ISymmetricEngine engine) {
+        boolean registered = true;
+        INodeService nodeService = engine.getNodeService();
+        org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity();
+        if (modelNode == null) {
+            registered = false;
+        } else {
+            NodeSecurity nodeSecurity = nodeService.findNodeSecurity(modelNode.getNodeId());
+            if (nodeSecurity == null) {
+                registered = false;
+            }
+        }
+        return registered;
+    }
+
+    private NodeStatus nodeStatusImpl(ISymmetricEngine engine) {
+
+        NodeStatus status = new NodeStatus();
+        if (isRegistered(engine)) {
+            INodeService nodeService = engine.getNodeService();
+            org.jumpmind.symmetric.model.Node modelNode = nodeService.findIdentity();
+            NodeSecurity nodeSecurity = nodeService.findNodeSecurity(modelNode.getNodeId());
+            List<NodeHost> nodeHost = nodeService.findNodeHosts(modelNode.getNodeId());
+            status.setStarted(engine.isStarted());
+            status.setRegistered(nodeSecurity.getRegistrationTime() != null);
+            status.setInitialLoaded(nodeSecurity.getInitialLoadTime() != null);
+            status.setReverseInitialLoaded(nodeSecurity.getRevInitialLoadTime() != null);
+            status.setNodeId(modelNode.getNodeId());
+            status.setNodeGroupId(modelNode.getNodeGroupId());
+            status.setExternalId(modelNode.getExternalId());
+            status.setSyncUrl(modelNode.getSyncUrl());
+            status.setRegistrationUrl(engine.getParameterService().getRegistrationUrl());
+            status.setDatabaseType(modelNode.getDatabaseType());
+            status.setDatabaseVersion(modelNode.getDatabaseVersion());
+            status.setSyncEnabled(modelNode.isSyncEnabled());
+            status.setCreatedAtNodeId(modelNode.getCreatedAtNodeId());
+            status.setBatchToSendCount(engine.getOutgoingBatchService()
+                    .countOutgoingBatchesUnsent());
+            status.setBatchInErrorCount(engine.getOutgoingBatchService()
+                    .countOutgoingBatchesInError());
+            status.setDeploymentType(modelNode.getDeploymentType());
+            if (modelNode.getCreatedAtNodeId() == null) {
+                status.setRegistrationServer(true);
+            } else {
+                status.setRegistrationServer(false);
+            }
+            if (nodeHost != null && nodeHost.size() > 0) {
+                status.setLastHeartbeat(nodeHost.get(0).getHeartbeatTime());
+            }
+            status.setHeartbeatInterval(engine.getParameterService().getInt(
+                    ParameterConstants.HEARTBEAT_SYNC_ON_PUSH_PERIOD_SEC));
+            if (status.getHeartbeatInterval() == 0) {
+                status.setHeartbeatInterval(600);
+            }
+        } else {
+            throw new NotFoundException();
+        }
+        return status;
+    }
+
+    private Set<ChannelStatus> channelStatusImpl(ISymmetricEngine engine) {
+        HashSet<ChannelStatus> channelStatus = new HashSet<ChannelStatus>();
+        List<NodeChannel> channels = engine.getConfigurationService().getNodeChannels(false);
+        for (NodeChannel nodeChannel : channels) {
+            String channelId = nodeChannel.getChannelId();
+            ChannelStatus status = new ChannelStatus();
+            status.setChannelId(channelId);
+            int outgoingInError = engine.getOutgoingBatchService().countOutgoingBatchesInError(
+                    channelId);
+            int incomingInError = engine.getIncomingBatchService().countIncomingBatchesInError(
+                    channelId);
+            status.setBatchInErrorCount(outgoingInError);
+            status.setBatchToSendCount(engine.getOutgoingBatchService().countOutgoingBatchesUnsent(
+                    channelId));
+            status.setIncomingError(incomingInError > 0);
+            status.setOutgoingError(outgoingInError > 0);
+            status.setEnabled(nodeChannel.isEnabled());
+            status.setIgnoreEnabled(nodeChannel.isIgnoreEnabled());
+            status.setSuspendEnabled(nodeChannel.isSuspendEnabled());
+            channelStatus.add(status);
+        }
+        return channelStatus;
+    }
+
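+    /**
+     * Runs ad-hoc SQL against the engine's database. When isQuery is false the statement is
+     * executed as an update and only the affected row count is returned; otherwise each result
+     * row and column is copied into the REST model.
+     */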
+    private QueryResults queryNodeImpl(ISymmetricEngine engine, String sql, boolean isQuery) {
+
+        QueryResults results = new QueryResults();
+        org.jumpmind.symmetric.web.rest.model.Row xmlRow = null;
+        org.jumpmind.symmetric.web.rest.model.Column xmlColumn = null;
+
+        ISqlTemplate sqlTemplate = engine.getSqlTemplate();
+        try {
+            if(!isQuery){
+                int updates = sqlTemplate.update(sql);
+                results.setNbrResults(updates);
+                return results;
+            }
+
+            List<Row> rows = sqlTemplate.query(sql);
+            int nbrRows = 0;
+            for (Row row : rows) {
+                xmlRow = new org.jumpmind.symmetric.web.rest.model.Row();
+                Iterator<Map.Entry<String, Object>> itr = row.entrySet().iterator();
+                int columnOrdinal = 0;
+                while (itr.hasNext()) {
+                    xmlColumn = new org.jumpmind.symmetric.web.rest.model.Column();
+                    xmlColumn.setOrdinal(++columnOrdinal);
+                    Map.Entry<String, Object> pair = (Map.Entry<String, Object>) itr.next();
+                    xmlColumn.setName(pair.getKey());
+                    if (pair.getValue() != null) {
+                        xmlColumn.setValue(pair.getValue().toString());
+                    }
+                    xmlRow.getColumnData().add(xmlColumn);
+                }
+                xmlRow.setRowNum(++nbrRows);
+                results.getResults().add(xmlRow);
+            }
+            results.setNbrResults(nbrRows);
+        } catch (Exception ex) {
+            log.error("Exception while executing sql.", ex);
+            throw new NotAllowedException("Error while executing sql %s. Error is %s", sql, ex
+                    .getCause().getMessage());
+        }
+        return results;
+    }
+
+    protected SymmetricEngineHolder getSymmetricEngineHolder() {
+        SymmetricEngineHolder holder = (SymmetricEngineHolder) context
+                .getAttribute(WebConstants.ATTR_ENGINE_HOLDER);
+        if (holder == null) {
+            throw new NotFoundException();
+        }
+        return holder;
+    }
+
+    protected ISymmetricEngine getSymmetricEngine(String engineName) {
+        SymmetricEngineHolder holder = getSymmetricEngineHolder();
+
+        ISymmetricEngine engine = null;
+        if (StringUtils.isNotBlank(engineName)) {
+            engine = holder.getEngines().get(engineName);
+        }
+        if (engine == null) {
+            throw new NotFoundException();
+        } else if (!engine.getParameterService().is(ParameterConstants.REST_API_ENABLED)) {
+            throw new NotAllowedException("The REST API was not enabled for %s",
+                    engine.getEngineName());
+        } else {
+            MDC.put("engineName", engine.getEngineName());
+            return engine;
+        }
+    }
+
+    protected boolean securityVerified(String nodeId, ISymmetricEngine engine, String securityToken) {
+
+        INodeService nodeService = engine.getNodeService();
+        boolean allowed = false;
+        org.jumpmind.symmetric.model.Node targetNode = nodeService.findNode(nodeId);
+        if (targetNode != null) {
+            NodeSecurity security = nodeService.findNodeSecurity(nodeId);
+            allowed = security.getNodePassword().equals(securityToken);
+        }
+        return allowed;
+    }
+
+    protected ISymmetricEngine getSymmetricEngine() {
+        ISymmetricEngine engine = null;
+        SymmetricEngineHolder holder = getSymmetricEngineHolder();
+
+        if (holder.getEngines().size() > 0) {
+            engine = holder.getEngines().values().iterator().next();
+        }
+
+        if (engine == null) {
+            throw new NotAllowedException();
+        } else if (!engine.getParameterService().is(ParameterConstants.REST_API_ENABLED)) {
+            throw new NotAllowedException("The REST API was not enabled for %s",
+                    engine.getEngineName());
+        } else {
+            return engine;
+        }
+
+    }
+
+}
diff --git a/symmetric-server/src/test/java/org/jumpmind/symmetric/test/AbstractIntegrationTest.java b/symmetric-server/src/test/java/org/jumpmind/symmetric/test/AbstractIntegrationTest.java
index d16206606e..7d8e39a3f3 100644
--- a/symmetric-server/src/test/java/org/jumpmind/symmetric/test/AbstractIntegrationTest.java
+++ b/symmetric-server/src/test/java/org/jumpmind/symmetric/test/AbstractIntegrationTest.java
@@ -39,7 +39,7 @@
 import org.jumpmind.symmetric.TestConstants;
 import org.jumpmind.symmetric.common.ParameterConstants;
 import org.jumpmind.symmetric.common.SystemConstants;
-import org.jumpmind.symmetric.model.IncomingBatch.Status;
+import org.jumpmind.symmetric.model.AbstractBatch.Status;
 import org.jumpmind.symmetric.model.OutgoingBatches;
 import org.jumpmind.symmetric.model.RemoteNodeStatuses;
 import org.jumpmind.symmetric.service.IOutgoingBatchService;
diff --git a/symmetric-server/src/test/java/org/jumpmind/symmetric/test/AbstractTest.java b/symmetric-server/src/test/java/org/jumpmind/symmetric/test/AbstractTest.java
index cd2af9533a..83000c6592 100644
--- a/symmetric-server/src/test/java/org/jumpmind/symmetric/test/AbstractTest.java
+++ b/symmetric-server/src/test/java/org/jumpmind/symmetric/test/AbstractTest.java
@@ -50,7 +50,7 @@
 import org.jumpmind.symmetric.common.ParameterConstants;
 import org.jumpmind.symmetric.common.SystemConstants;
 import org.jumpmind.symmetric.model.IncomingBatch;
-import org.jumpmind.symmetric.model.IncomingBatch.Status;
+import org.jumpmind.symmetric.model.AbstractBatch.Status;
 import org.jumpmind.symmetric.model.RemoteNodeStatuses;
 import org.jumpmind.symmetric.service.IDataLoaderService;
 import org.jumpmind.util.AppUtils;
diff --git a/symmetric-server/src/test/java/org/jumpmind/symmetric/test/MultiNodeGroupSharedTablesRoutingTest.java b/symmetric-server/src/test/java/org/jumpmind/symmetric/test/MultiNodeGroupSharedTablesRoutingTest.java
index 37943da3bc..87bdc359bb 100644
--- a/symmetric-server/src/test/java/org/jumpmind/symmetric/test/MultiNodeGroupSharedTablesRoutingTest.java
+++ b/symmetric-server/src/test/java/org/jumpmind/symmetric/test/MultiNodeGroupSharedTablesRoutingTest.java
@@ -61,10 +61,10 @@ protected void test(ISymmetricEngine rootServer, ISymmetricEngine clientServer)
         assertEquals(1, batches1.size());
         assertEquals(1, batches2.size());
 
-        assertEquals(30, batches1.get(0).getDataEventCount());
-        assertEquals(30, batches1.get(0).getInsertEventCount());
-        assertEquals(10, batches2.get(0).getDataEventCount());
-        assertEquals(10, batches2.get(0).getInsertEventCount());
+        assertEquals(30, batches1.get(0).getDataRowCount());
+        assertEquals(30, batches1.get(0).getDataInsertRowCount());
+        assertEquals(10, batches2.get(0).getDataRowCount());
+        assertEquals(10, batches2.get(0).getDataInsertRowCount());
 
         assertEquals(30, template(rootServer).queryForInt("select count(*) from sym_data_event where batch_id=?", batches1.get(0).getBatchId()));
         assertEquals(10, template(rootServer).queryForInt("select count(*) from sym_data_event where batch_id=?", batches2.get(0).getBatchId()));