diff --git a/symmetric/symmetric-jdbc/pom.xml b/symmetric/symmetric-jdbc/pom.xml
new file mode 100644
index 0000000000..bc7b112d21
--- /dev/null
+++ b/symmetric/symmetric-jdbc/pom.xml
@@ -0,0 +1,98 @@
+
+ 4.0.0
+ org.jumpmind.symmetric
+ symmetric-jdbc
+ jar
+ 3.0.0-SNAPSHOT
+ jdbc
+ http://symmetricds.org
+
+
+ org.jumpmind.symmetric
+ symmetric-parent
+ 3.0.0-SNAPSHOT
+ ../symmetric-parent/pom.xml
+
+
+
+
+
+ maven-jar-plugin
+ 2.2
+
+
+
+ test-jar
+
+ test-compile
+
+
+
+ ${basedir}/target
+
+
+
+
+
+
+
+ org.jumpmind.symmetric
+ symmetric-util
+
+
+ org.jumpmind.symmetric
+ symmetric-db
+
+
+ junit
+ junit
+ test
+
+
+ log4j
+ log4j
+ test
+
+
+ org.springframework
+ spring-jdbc
+
+
+
+ org.apache.derby
+ derby
+ true
+
+
+ org.apache.derby
+ derbytools
+ true
+
+
+ mysql
+ mysql-connector-java
+ true
+
+
+ postgresql
+ postgresql
+ true
+
+
+ net.sourceforge.jtds
+ jtds
+ true
+
+
+ org.hsqldb
+ hsqldb
+ true
+
+
+ com.h2database
+ h2
+ true
+
+
+
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/AbstractJdbcDatabasePlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/AbstractJdbcDatabasePlatform.java
new file mode 100644
index 0000000000..30d039b6c6
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/AbstractJdbcDatabasePlatform.java
@@ -0,0 +1,81 @@
+package org.jumpmind.db.platform;
+
+import java.sql.SQLException;
+
+import javax.sql.DataSource;
+
+import org.jumpmind.db.AbstractDatabasePlatform;
+import org.jumpmind.db.sql.ISqlTemplate;
+import org.jumpmind.db.sql.jdbc.JdbcSqlTemplate;
+import org.jumpmind.log.Log;
+
+/*
+ * Base class for JDBC-backed database platforms. Wires a DataSource and
+ * platform settings into a JdbcSqlTemplate and provides helpers for
+ * classifying primary key violation errors via driver-specific error
+ * codes and SQL states (supplied by the subclass/superclass fields).
+ */
+abstract public class AbstractJdbcDatabasePlatform extends AbstractDatabasePlatform {
+
+ /* Source of JDBC connections for this platform. */
+ protected DataSource dataSource;
+
+ /* Template used to execute SQL against the data source. */
+ protected ISqlTemplate sqlTemplate;
+
+ /* Platform specific settings passed to the SQL template. */
+ protected DatabasePlatformSettings settings;
+
+ public AbstractJdbcDatabasePlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(log);
+ this.dataSource = dataSource;
+ this.settings = settings;
+ createSqlTemplate();
+ }
+
+ /* Creates the SQL template; subclasses may override to customize it. */
+ protected void createSqlTemplate() {
+ // NOTE(review): third constructor argument is passed as null here -
+ // confirm whether the log (or lob handler) should be supplied instead.
+ this.sqlTemplate = new JdbcSqlTemplate(dataSource, settings, null);
+ }
+
+ @Override
+ public ISqlTemplate getSqlTemplate() {
+ return sqlTemplate;
+ }
+
+ /*
+ * Returns true if the given exception (or any exception in its cause
+ * chain) is a SQLException whose vendor error code or SQL state matches
+ * one of the configured primary key violation codes/states.
+ */
+ public boolean isPrimaryKeyViolation(Exception ex) {
+ boolean primaryKeyViolation = false;
+ if (primaryKeyViolationCodes != null || primaryKeyViolationSqlStates != null) {
+ SQLException sqlEx = findSQLException(ex);
+ if (sqlEx != null) {
+ if (primaryKeyViolationCodes != null) {
+ int errorCode = sqlEx.getErrorCode();
+ for (int primaryKeyViolationCode : primaryKeyViolationCodes) {
+ if (primaryKeyViolationCode == errorCode) {
+ primaryKeyViolation = true;
+ break;
+ }
+ }
+ }
+
+ if (primaryKeyViolationSqlStates != null) {
+ String sqlState = sqlEx.getSQLState();
+ if (sqlState != null) {
+ for (String primaryKeyViolationSqlState : primaryKeyViolationSqlStates) {
+ if (primaryKeyViolationSqlState != null
+ && primaryKeyViolationSqlState.equals(sqlState)) {
+ primaryKeyViolation = true;
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return primaryKeyViolation;
+ }
+
+ /* Walks the cause chain and returns the first SQLException found, or null. */
+ protected SQLException findSQLException(Throwable ex) {
+ if (ex instanceof SQLException) {
+ return (SQLException) ex;
+ } else {
+ Throwable cause = ex.getCause();
+ // guard against self-referential causes to avoid infinite recursion
+ if (cause != null && !cause.equals(ex)) {
+ return findSQLException(cause);
+ }
+ }
+ return null;
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/AbstractJdbcDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/AbstractJdbcDdlReader.java
new file mode 100644
index 0000000000..910e89f8f6
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/AbstractJdbcDdlReader.java
@@ -0,0 +1,1275 @@
+package org.jumpmind.db.platform;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.text.Collator;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.DatabasePlatformInfo;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.IDdlReader;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.IndexColumn;
+import org.jumpmind.db.model.NonUniqueIndex;
+import org.jumpmind.db.model.Reference;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.model.UniqueIndex;
+import org.jumpmind.db.sql.jdbc.IConnectionCallback;
+import org.jumpmind.db.sql.jdbc.JdbcSqlTemplate;
+import org.jumpmind.log.Log;
+import org.jumpmind.log.LogFactory;
+
+/*
+ * An utility class to create a Database model from a live database.
+ */
+public abstract class AbstractJdbcDdlReader implements IDdlReader {
+
+ /* The Log to which logging calls will be made. */
+ protected Log log = LogFactory.getLog(getClass());
+
+ /* The descriptors for the relevant columns in the table meta data. */
+ private final List _columnsForTable;
+
+ /* The descriptors for the relevant columns in the table column meta data. */
+ private final List _columnsForColumn;
+
+ /* The descriptors for the relevant columns in the primary key meta data. */
+ private final List _columnsForPK;
+
+ /* The descriptors for the relevant columns in the foreign key meta data. */
+ private final List _columnsForFK;
+
+ /* The descriptors for the relevant columns in the index meta data. */
+ private final List _columnsForIndex;
+
+ /* The platform that this model reader belongs to. */
+ private IDatabasePlatform platform;
+ /*
+ * Contains default column sizes (minimum sizes that a JDBC-compliant db
+ * must support), keyed by java.sql.Types code. Used when COLUMN_SIZE is
+ * not reported by the driver.
+ */
+ private HashMap _defaultSizes = new HashMap();
+
+ /* The default database catalog to read. */
+ private String _defaultCatalogPattern = "%";
+
+ /* The default database schema(s) to read. */
+ private String _defaultSchemaPattern = "%";
+
+ /* The default pattern for reading all tables. */
+ private String _defaultTablePattern = "%";
+
+ /* The default pattern for reading all columns. */
+ private String _defaultColumnPattern;
+
+ /* The table types to recognize per default. */
+ private String[] _defaultTableTypes = { "TABLE" };
+
+ /*
+ * Creates a reader bound to the given platform and registers the
+ * JDBC-minimum default column sizes plus the meta data column descriptors.
+ * NOTE(review): the log parameter is never used - the log field is
+ * initialized via LogFactory above; confirm whether the parameter should
+ * be assigned instead.
+ */
+ public AbstractJdbcDdlReader(Log log, IDatabasePlatform platform) {
+ this.platform = platform;
+
+ _defaultSizes.put(new Integer(Types.CHAR), "254");
+ _defaultSizes.put(new Integer(Types.VARCHAR), "254");
+ _defaultSizes.put(new Integer(Types.LONGVARCHAR), "254");
+ _defaultSizes.put(new Integer(Types.BINARY), "254");
+ _defaultSizes.put(new Integer(Types.VARBINARY), "254");
+ _defaultSizes.put(new Integer(Types.LONGVARBINARY), "254");
+ _defaultSizes.put(new Integer(Types.INTEGER), "32");
+ _defaultSizes.put(new Integer(Types.BIGINT), "64");
+ _defaultSizes.put(new Integer(Types.REAL), "7,0");
+ _defaultSizes.put(new Integer(Types.FLOAT), "15,0");
+ _defaultSizes.put(new Integer(Types.DOUBLE), "15,0");
+ _defaultSizes.put(new Integer(Types.DECIMAL), "15,15");
+ _defaultSizes.put(new Integer(Types.NUMERIC), "15,15");
+
+ _columnsForTable = initColumnsForTable();
+ _columnsForColumn = initColumnsForColumn();
+ _columnsForPK = initColumnsForPK();
+ _columnsForFK = initColumnsForFK();
+ _columnsForIndex = initColumnsForIndex();
+ }
+
+ /*
+ * Returns the platform that this model reader belongs to.
+ *
+ * @return The platform, never null once constructed
+ */
+ public IDatabasePlatform getPlatform() {
+ return platform;
+ }
+
+ /*
+ * Returns the platform specific settings (convenience delegate to the
+ * platform).
+ *
+ * @return The platform settings
+ */
+ public DatabasePlatformInfo getPlatformInfo() {
+ return platform.getPlatformInfo();
+ }
+
+ /*
+ * Returns descriptors for the columns that shall be read from the result
+ * set when reading the meta data for a table. Note that the columns are
+ * read in the order defined by this list. Redefine this method if you
+ * want more columns or a different order.
+ *
+ * @return The descriptors for the result set columns
+ */
+ protected List initColumnsForTable() {
+ List result = new ArrayList();
+
+ result.add(new MetaDataColumnDescriptor("TABLE_NAME", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("TABLE_TYPE", Types.VARCHAR, "UNKNOWN"));
+ result.add(new MetaDataColumnDescriptor("TABLE_CAT", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("TABLE_SCHEM", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("REMARKS", Types.VARCHAR));
+
+ return result;
+ }
+
+ /*
+ * Returns descriptors for the columns that shall be read from the result
+ * set when reading the meta data for table columns. Note that the columns
+ * are read in the order defined by this list. Redefine this method if
+ * you want more columns or a different order.
+ *
+ * @return The map column name -> descriptor for the result set columns
+ */
+ protected List initColumnsForColumn() {
+ List result = new ArrayList();
+
+ // As suggested by Alexandre Borgoltz, we're reading the COLUMN_DEF
+ // first because Oracle
+ // has problems otherwise (it seemingly requires a LONG column to be the
+ // first to be read)
+ // See also DDLUTILS-29
+ result.add(new MetaDataColumnDescriptor("COLUMN_DEF", Types.VARCHAR));
+ // we're also reading the table name so that a model reader impl can
+ // filter manually
+ result.add(new MetaDataColumnDescriptor("TABLE_NAME", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("COLUMN_NAME", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("TYPE_NAME", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("DATA_TYPE", Types.INTEGER, new Integer(
+ java.sql.Types.OTHER)));
+ result.add(new MetaDataColumnDescriptor("NUM_PREC_RADIX", Types.INTEGER, new Integer(10)));
+ result.add(new MetaDataColumnDescriptor("DECIMAL_DIGITS", Types.INTEGER, new Integer(0)));
+ result.add(new MetaDataColumnDescriptor("COLUMN_SIZE", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("IS_NULLABLE", Types.VARCHAR, "YES"));
+ // NOTE(review): defaulting IS_AUTOINCREMENT to "YES" treats drivers
+ // that omit the column as auto-increment - confirm this is intended.
+ result.add(new MetaDataColumnDescriptor("IS_AUTOINCREMENT", Types.VARCHAR, "YES"));
+ result.add(new MetaDataColumnDescriptor("REMARKS", Types.VARCHAR));
+
+ return result;
+ }
+
+ /*
+ * Returns descriptors for the columns that shall be read from the result
+ * set when reading the meta data for primary keys. Note that the columns
+ * are read in the order defined by this list. Redefine this method if
+ * you want more columns or a different order.
+ *
+ * @return The map column name -> descriptor for the result set columns
+ */
+ protected List initColumnsForPK() {
+ List result = new ArrayList();
+
+ result.add(new MetaDataColumnDescriptor("COLUMN_NAME", Types.VARCHAR));
+ // we're also reading the table name so that a model reader impl can
+ // filter manually
+ result.add(new MetaDataColumnDescriptor("TABLE_NAME", Types.VARCHAR));
+ // the name of the primary key is currently only interesting to the pk
+ // index name resolution
+ result.add(new MetaDataColumnDescriptor("PK_NAME", Types.VARCHAR));
+
+ return result;
+ }
+
+ /*
+ * Returns descriptors for the columns that shall be read from the result
+ * set when reading the meta data for foreign keys originating from a table.
+ * Note that the columns are read in the order defined by this list.
+ * Redefine this method if you want more columns or a different order.
+ *
+ * @return The map column name -> descriptor for the result set columns
+ */
+ protected List initColumnsForFK() {
+ List result = new ArrayList();
+
+ result.add(new MetaDataColumnDescriptor("PKTABLE_NAME", Types.VARCHAR));
+ // we're also reading the table name so that a model reader impl can
+ // filter manually
+ result.add(new MetaDataColumnDescriptor("FKTABLE_NAME", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("KEY_SEQ", Types.TINYINT, new Short((short) 0)));
+ result.add(new MetaDataColumnDescriptor("FK_NAME", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("PKCOLUMN_NAME", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("FKCOLUMN_NAME", Types.VARCHAR));
+ return result;
+ }
+
+ /*
+ * Returns descriptors for the columns that shall be read from the result
+ * set when reading the meta data for indices. Note that the columns are
+ * read in the order defined by this list. Redefine this method if you
+ * want more columns or a different order.
+ *
+ * @return The map column name -> descriptor for the result set columns
+ */
+ protected List initColumnsForIndex() {
+ List result = new ArrayList();
+
+ result.add(new MetaDataColumnDescriptor("INDEX_NAME", Types.VARCHAR));
+ // we're also reading the table name so that a model reader impl can
+ // filter manually
+ result.add(new MetaDataColumnDescriptor("TABLE_NAME", Types.VARCHAR));
+ // NON_UNIQUE defaults to true so unknown indices are treated as non-unique
+ result.add(new MetaDataColumnDescriptor("NON_UNIQUE", Types.BIT, Boolean.TRUE));
+ result.add(new MetaDataColumnDescriptor("ORDINAL_POSITION", Types.TINYINT, new Short(
+ (short) 0)));
+ result.add(new MetaDataColumnDescriptor("COLUMN_NAME", Types.VARCHAR));
+ result.add(new MetaDataColumnDescriptor("TYPE", Types.TINYINT));
+ return result;
+ }
+
+ /*
+ * Returns the catalog(s) in the database to read per default.
+ * Defaults to "%" (all catalogs).
+ *
+ * @return The default catalog(s)
+ */
+ public String getDefaultCatalogPattern() {
+ return _defaultCatalogPattern;
+ }
+
+ /*
+ * Sets the catalog(s) in the database to read per default.
+ *
+ * @param catalogPattern The catalog(s)
+ */
+ public void setDefaultCatalogPattern(String catalogPattern) {
+ _defaultCatalogPattern = catalogPattern;
+ }
+
+ /*
+ * Returns the schema(s) in the database to read per default.
+ * Defaults to "%" (all schemas).
+ *
+ * @return The default schema(s)
+ */
+ public String getDefaultSchemaPattern() {
+ return _defaultSchemaPattern;
+ }
+
+ /*
+ * Sets the schema(s) in the database to read per default.
+ *
+ * @param schemaPattern The schema(s)
+ */
+ public void setDefaultSchemaPattern(String schemaPattern) {
+ _defaultSchemaPattern = schemaPattern;
+ }
+
+ /*
+ * Returns the default pattern to read the relevant tables from the
+ * database.
+ *
+ * @return The table pattern
+ */
+ public String getDefaultTablePattern() {
+ return _defaultTablePattern;
+ }
+
+ /*
+ * Sets the default pattern to read the relevant tables from the database.
+ *
+ * @param tablePattern The table pattern
+ */
+ public void setDefaultTablePattern(String tablePattern) {
+ _defaultTablePattern = tablePattern;
+ }
+
+ /*
+ * Returns the default pattern to read the relevant columns from the
+ * database. May be null, in which case all columns are read.
+ *
+ * @return The column pattern
+ */
+ public String getDefaultColumnPattern() {
+ return _defaultColumnPattern;
+ }
+
+ /*
+ * Sets the default pattern to read the relevant columns from the database.
+ *
+ * @param columnPattern The column pattern
+ */
+ public void setDefaultColumnPattern(String columnPattern) {
+ _defaultColumnPattern = columnPattern;
+ }
+
+ /*
+ * Returns the table types to recognize per default. Defaults to "TABLE".
+ *
+ * @return The default table types
+ */
+ public String[] getDefaultTableTypes() {
+ return _defaultTableTypes;
+ }
+
+ /*
+ * Sets the table types to recognize per default. Typical types are "TABLE",
+ * "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY", "ALIAS",
+ * "SYNONYM".
+ *
+ * @param types The table types
+ */
+ public void setDefaultTableTypes(String[] types) {
+ _defaultTableTypes = types;
+ }
+
+ /*
+ * Returns the descriptors for the columns to be read from the table meta
+ * data result set (initialized once in the constructor).
+ *
+ * @return The column descriptors
+ */
+ protected List getColumnsForTable() {
+ return _columnsForTable;
+ }
+
+ /*
+ * Returns the descriptors for the columns to be read from the column meta
+ * data result set.
+ *
+ * @return The column descriptors
+ */
+ protected List getColumnsForColumn() {
+ return _columnsForColumn;
+ }
+
+ /*
+ * Returns the descriptors for the columns to be read from the primary key
+ * meta data result set.
+ *
+ * @return The column descriptors
+ */
+ protected List getColumnsForPK() {
+ return _columnsForPK;
+ }
+
+ /*
+ * Returns the descriptors for the columns to be read from the foreign key
+ * meta data result set.
+ *
+ * @return The column descriptors
+ */
+ protected List getColumnsForFK() {
+ return _columnsForFK;
+ }
+
+ /*
+ * Returns the descriptors for the columns to be read from the index meta
+ * data result set.
+ *
+ * @return The column descriptors
+ */
+ protected List getColumnsForIndex() {
+ return _columnsForIndex;
+ }
+
+ /*
+ * Reads the database model from the given connection.
+ *
+ * @param connection The connection
+ *
+ * @param name The name of the resulting database; null when
+ * the default name (the catalog) is desired which might be
+ * null itself though
+ *
+ * @return The database model
+ */
+ public Database getDatabase(Connection connection) throws SQLException {
+ // NOTE(review): the connection parameter is unused - readTables obtains
+ // its own connection via the platform's SQL template; confirm intended.
+ return readTables(null, null, null);
+ }
+
+ /*
+ * Reads the database model from the given connection.
+ *
+ * @param catalog The catalog to access in the database; use
+ * null for the default value
+ *
+ * @param schema The schema to access in the database; use null
+ * for the default value
+ *
+ * @param tableTypes The table types to process; use null or an
+ * empty list for the default ones
+ *
+ * @return The database model
+ */
+ public Database readTables(final String catalog, final String schema, final String[] tableTypes) {
+ JdbcSqlTemplate sqlTemplate = (JdbcSqlTemplate) platform.getSqlTemplate();
+ // the callback runs with a connection managed by the SQL template
+ return postprocessModelFromDatabase(sqlTemplate.execute(new IConnectionCallback() {
+ public Database execute(Connection connection) throws SQLException {
+ Database db = new Database();
+ db.setName(Table.getQualifiedTablePrefix(catalog, schema));
+ db.addTables(readTables(connection, catalog, schema, tableTypes));
+ // Note that we do this here instead of in readTable since
+ // platforms may
+ // redefine the readTable method whereas it is highly unlikely
+ // that this method gets
+ // redefined
+ if (getPlatform().isForeignKeysSorted()) {
+ sortForeignKeys(db);
+ }
+ db.initialize();
+ return db;
+ }
+ }));
+ }
+
+ /*
+ * Allows the platform to postprocess the model just read from the database.
+ * The default implementation postprocesses every table in the model.
+ *
+ * @param model The model
+ *
+ * @return The same model instance, after postprocessing
+ */
+ protected Database postprocessModelFromDatabase(Database model) {
+ // Default values for CHAR/VARCHAR/LONGVARCHAR columns have quotation
+ // marks around them which we'll remove now
+ for (int tableIdx = 0; tableIdx < model.getTableCount(); tableIdx++) {
+ postprocessTableFromDatabase(model.getTable(tableIdx));
+ }
+ return model;
+ }
+
+ /*
+ * Reads the tables from the database metadata.
+ *
+ * @param catalog The catalog to acess in the database; use
+ * null for the default value
+ *
+ * @param schemaPattern The schema(s) to acess in the database; use
+ * null for the default value
+ *
+ * @param tableTypes The table types to process; use null or an
+ * empty list for the default ones
+ *
+ * @return The tables
+ */
+ protected Collection
() {
+ public Table execute(Connection connection) throws SQLException {
+ DatabaseMetaDataWrapper metaData = new DatabaseMetaDataWrapper();
+ metaData.setMetaData(connection.getMetaData());
+ metaData.setCatalog(catalog);
+ metaData.setSchemaPattern(schema);
+ metaData.setTableTypes(null);
+ String tableName = table;
+ if (getPlatformInfo().isStoresUpperCaseInCatalog()) {
+ tableName = tableName.toUpperCase();
+ }
+
+ ResultSet tableData = null;
+ try {
+ tableData = metaData.getTables(getTableNamePattern(tableName));
+ if (tableData != null && tableData.next()) {
+ Map values = readColumns(tableData, initColumnsForTable());
+ return readTable(connection, metaData, values);
+ } else {
+ return null;
+ }
+ } finally {
+ close(tableData);
+ }
+ }
+ }));
+
+ }
+
+ /*
+ * Postprocesses a single table just read from the database: strips the
+ * surrounding single quotes from default values of text and date/time
+ * columns. Tolerates a null table (returns null).
+ *
+ * @param table The table to postprocess; may be null
+ *
+ * @return The same table instance
+ */
+ protected Table postprocessTableFromDatabase(Table table) {
+ if (table != null) {
+ for (int columnIdx = 0; columnIdx < table.getColumnCount(); columnIdx++) {
+ Column column = table.getColumn(columnIdx);
+
+ if (TypeMap.isTextType(column.getTypeCode())
+ || TypeMap.isDateTimeType(column.getTypeCode())) {
+ String defaultValue = column.getDefaultValue();
+
+ if ((defaultValue != null) && (defaultValue.length() >= 2)
+ && defaultValue.startsWith("'") && defaultValue.endsWith("'")) {
+ defaultValue = defaultValue.substring(1, defaultValue.length() - 1);
+ column.setDefaultValue(defaultValue);
+ }
+ }
+ }
+ }
+ return table;
+ }
+
+ /* Best-effort close of a result set; null-safe. */
+ protected void close(ResultSet rs) {
+ if (rs != null) {
+ try {
+ rs.close();
+ } catch (SQLException ex) {
+ // intentionally ignored: close is best-effort during cleanup
+ }
+ }
+ }
+
+ /* Best-effort close of a statement; null-safe. */
+ protected void close(Statement stmt) {
+ if (stmt != null) {
+ try {
+ stmt.close();
+ } catch (SQLException ex) {
+ // intentionally ignored: close is best-effort during cleanup
+ }
+ }
+ }
+
+ /*
+ * Hook allowing platforms to translate a table name into the pattern
+ * passed to the JDBC meta data calls (e.g. to escape wildcards).
+ * The default implementation returns the name unchanged.
+ */
+ protected String getTableNamePattern(String tableName) {
+ return tableName;
+ }
+
+ /*
+ * Reads the next table from the meta data.
+ *
+ * @param metaData The database meta data
+ *
+ * @param values The table metadata values as defined by {@link
+ * #getColumnsForTable()}
+ *
+ * @return The table or null if the result set row did not
+ * contain a valid table
+ */
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+ Map values) throws SQLException {
+ String tableName = (String) values.get("TABLE_NAME");
+ Table table = null;
+
+ if ((tableName != null) && (tableName.length() > 0)) {
+ table = new Table();
+
+ table.setName(tableName);
+ table.setType((String) values.get("TABLE_TYPE"));
+ table.setCatalog((String) values.get("TABLE_CAT"));
+ table.setSchema((String) values.get("TABLE_SCHEM"));
+ table.setDescription((String) values.get("REMARKS"));
+
+ // populate columns first so PK flags below can resolve them
+ table.addColumns(readColumns(metaData, tableName));
+ table.addForeignKeys(readForeignKeys(connection, metaData, tableName));
+ table.addIndices(readIndices(connection, metaData, tableName));
+
+ Collection primaryKeys = readPrimaryKeyNames(metaData, tableName);
+
+ for (Iterator it = primaryKeys.iterator(); it.hasNext();) {
+ table.findColumn(it.next(), true).setPrimaryKey(true);
+ }
+
+ // drop db-generated indices for PKs/FKs where the platform reports them
+ if (getPlatformInfo().isSystemIndicesReturned()) {
+ removeSystemIndices(connection, metaData, table);
+ }
+ }
+ return table;
+ }
+
+ /*
+ * Removes system indices (generated by the database for primary and foreign
+ * keys) from the table.
+ *
+ * @param metaData The database meta data
+ *
+ * @param table The table
+ */
+ protected void removeSystemIndices(Connection connection, DatabaseMetaDataWrapper metaData,
+ Table table) throws SQLException {
+ removeInternalPrimaryKeyIndex(connection, metaData, table);
+
+ for (int fkIdx = 0; fkIdx < table.getForeignKeyCount(); fkIdx++) {
+ removeInternalForeignKeyIndex(connection, metaData, table, table.getForeignKey(fkIdx));
+ }
+ }
+
+ /*
+ * Tries to remove the internal index for the table's primary key: any
+ * unique index covering exactly the PK columns that the platform
+ * identifies as internal is dropped from the table model.
+ *
+ * @param metaData The database meta data
+ *
+ * @param table The table
+ */
+ protected void removeInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table) throws SQLException {
+ Column[] pks = table.getPrimaryKeyColumns();
+ List columnNames = new ArrayList();
+
+ for (int columnIdx = 0; columnIdx < pks.length; columnIdx++) {
+ columnNames.add(pks[columnIdx].getName());
+ }
+
+ // no increment in the loop header: the index advances only when the
+ // current entry is kept, because removeIndex shifts later entries down
+ for (int indexIdx = 0; indexIdx < table.getIndexCount();) {
+ IIndex index = table.getIndex(indexIdx);
+
+ if (index.isUnique() && matches(index, columnNames)
+ && isInternalPrimaryKeyIndex(connection, metaData, table, index)) {
+ table.removeIndex(indexIdx);
+ } else {
+ indexIdx++;
+ }
+ }
+ }
+
+ /*
+ * Tries to remove the internal index for the given foreign key.
+ *
+ * @param metaData The database meta data
+ *
+ * @param table The table where the table is defined
+ *
+ * @param fk The foreign key
+ */
+ protected void removeInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk) throws SQLException {
+ List columnNames = new ArrayList();
+ boolean mustBeUnique = !getPlatformInfo().isSystemForeignKeyIndicesAlwaysNonUnique();
+
+ for (int columnIdx = 0; columnIdx < fk.getReferenceCount(); columnIdx++) {
+ String name = fk.getReference(columnIdx).getLocalColumnName();
+ Column localColumn = table
+ .findColumn(name, getPlatform().isDelimitedIdentifierModeOn());
+
+ // a system FK index can only be unique when every local column is
+ // part of the primary key
+ if (mustBeUnique && !localColumn.isPrimaryKey()) {
+ mustBeUnique = false;
+ }
+ columnNames.add(name);
+ }
+
+ // index advances only when the current entry is kept (see
+ // removeInternalPrimaryKeyIndex)
+ for (int indexIdx = 0; indexIdx < table.getIndexCount();) {
+ IIndex index = table.getIndex(indexIdx);
+
+ if ((mustBeUnique == index.isUnique()) && matches(index, columnNames)
+ && isInternalForeignKeyIndex(connection, metaData, table, fk, index)) {
+ fk.setAutoIndexPresent(true);
+ table.removeIndex(indexIdx);
+ } else {
+ indexIdx++;
+ }
+ }
+ }
+
+ /*
+ * Checks whether the given index matches the column list exactly
+ * (same columns, same order, same count).
+ *
+ * @param index The index
+ *
+ * @param columnsToSearchFor The names of the columns that the index should
+ * be for
+ *
+ * @return true if the index matches the columns
+ */
+ protected boolean matches(IIndex index, List columnsToSearchFor) {
+ if (index.getColumnCount() != columnsToSearchFor.size()) {
+ return false;
+ }
+ for (int columnIdx = 0; columnIdx < index.getColumnCount(); columnIdx++) {
+ if (!columnsToSearchFor.get(columnIdx).equals(index.getColumn(columnIdx).getName())) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /*
+ * Tries to determine whether the index is the internal database-generated
+ * index for the given table's primary key. Note that only unique indices
+ * with the correct columns are fed to this method. Redefine this method for
+ * specific platforms if there are better ways to determine internal
+ * indices. The default implementation always returns false (keep the
+ * index).
+ *
+ * @param metaData The database meta data
+ *
+ * @param table The table owning the index
+ *
+ * @param index The index to check
+ *
+ * @return true if the index seems to be an internal primary
+ * key one
+ */
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) throws SQLException {
+ return false;
+ }
+
+ /*
+ * Tries to determine whether the index is the internal database-generated
+ * index for the given foreign key. Note that only non-unique indices with
+ * the correct columns are fed to this method. Redefine this method for
+ * specific platforms if there are better ways to determine internal
+ * indices. The default implementation always returns false (keep the
+ * index).
+ *
+ * @param metaData The database meta data
+ *
+ * @param table The table owning the index and foreign key
+ *
+ * @param fk The foreign key
+ *
+ * @param index The index to check
+ *
+ * @return true if the index seems to be an internal primary
+ * key one
+ */
+ protected boolean isInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index)
+ throws SQLException {
+ return false;
+ }
+
+ /*
+ * Reads the column definitions for the indicated table.
+ *
+ * @param metaData The database meta data
+ *
+ * @param tableName The name of the table
+ *
+ * @return The columns
+ */
+ protected Collection readColumns(DatabaseMetaDataWrapper metaData, String tableName)
+ throws SQLException {
+ ResultSet columnData = null;
+ try {
+ columnData = metaData.getColumns(getTableNamePattern(tableName),
+ getDefaultColumnPattern());
+
+ List columns = new ArrayList();
+
+ while (columnData.next()) {
+ Map values = readColumns(columnData, getColumnsForColumn());
+
+ columns.add(readColumn(metaData, values));
+ }
+ return columns;
+ } finally {
+ // always release the result set, even on failure
+ close(columnData);
+ }
+ }
+
+ /*
+ * Hook for platforms to override the JDBC type code derived for a column.
+ * Returning null (the default) keeps the driver-reported DATA_TYPE.
+ */
+ protected Integer overrideJdbcTypeForColumn(Map values) {
+ return null;
+ }
+
+ /*
+ * Extracts a column definition from the result set.
+ *
+ * @param metaData The database meta data
+ *
+ * @param values The column meta data values as defined by {@link
+ * #getColumnsForColumn()}
+ *
+ * @return The column
+ */
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ Column column = new Column();
+ column.setName((String) values.get("COLUMN_NAME"));
+ String defaultValue = (String) values.get("COLUMN_DEF");
+ if (defaultValue != null) {
+ column.setDefaultValue(defaultValue.trim());
+ }
+ Integer jdbcType = overrideJdbcTypeForColumn(values);
+ if (jdbcType != null) {
+ column.setTypeCode(jdbcType);
+ } else {
+ column.setTypeCode((Integer) values.get("DATA_TYPE"));
+ }
+
+ column.setJdbcTypeName((String) values.get("TYPE_NAME"));
+ // NUM_PREC_RADIX/DECIMAL_DIGITS/IS_NULLABLE rely on the descriptor
+ // defaults from initColumnsForColumn(); a missing default would NPE here
+ column.setPrecisionRadix(((Integer) values.get("NUM_PREC_RADIX")).intValue());
+
+ String size = (String) values.get("COLUMN_SIZE");
+ int scale = ((Integer) values.get("DECIMAL_DIGITS")).intValue();
+
+ if (size == null) {
+ // fall back to the JDBC-minimum default size for this type
+ size = (String) _defaultSizes.get(new Integer(column.getTypeCode()));
+ }
+ // we're setting the size after the precision and radix in case
+ // the database prefers to return them in the size value
+ column.setSize(size);
+ if (scale != 0) {
+ // if there is a scale value, set it after the size (which probably
+ // did not contain
+ // a scale specification)
+ column.setScale(scale);
+ }
+ column.setRequired("NO".equalsIgnoreCase(((String) values.get("IS_NULLABLE")).trim()));
+ column.setDescription((String) values.get("REMARKS"));
+
+ return column;
+ }
+
+ /*
+ * Retrieves the names of the columns that make up the primary key for a
+ * given table.
+ *
+ * @param metaData The database meta data
+ *
+ * @param tableName The name of the table from which to retrieve PK
+ * information
+ *
+ * @return The primary key column names
+ */
+ protected Collection readPrimaryKeyNames(DatabaseMetaDataWrapper metaData,
+ String tableName) throws SQLException {
+ List pks = new ArrayList();
+ ResultSet pkData = null;
+
+ try {
+ pkData = metaData.getPrimaryKeys(getTableNamePattern(tableName));
+ while (pkData.next()) {
+ Map values = readColumns(pkData, getColumnsForPK());
+
+ pks.add(readPrimaryKeyName(metaData, values));
+ }
+ } finally {
+ // always release the result set, even on failure
+ close(pkData);
+ }
+ return pks;
+ }
+
+ /*
+ * Extracts a primary key name from the result set. The default
+ * implementation returns the COLUMN_NAME value.
+ *
+ * @param metaData The database meta data
+ *
+ * @param values The primary key meta data values as defined by {@link
+ * #getColumnsForPK()}
+ *
+ * @return The primary key name
+ */
+ protected String readPrimaryKeyName(DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ return (String) values.get("COLUMN_NAME");
+ }
+
+ /*
+ * Retrieves the foreign keys of the indicated table.
+ *
+ * @param metaData The database meta data
+ *
+ * @param tableName The name of the table from which to retrieve FK
+ * information
+ *
+ * @return The foreign keys
+ */
+ protected Collection readForeignKeys(Connection connection,
+ DatabaseMetaDataWrapper metaData, String tableName) throws SQLException {
+ Map fks = new LinkedHashMap();
+ ResultSet fkData = null;
+
+ try {
+ fkData = metaData.getForeignKeys(tableName);
+
+ while (fkData.next()) {
+ Map values = readColumns(fkData, getColumnsForFK());
+
+ readForeignKey(metaData, values, fks);
+ }
+ } finally {
+ close(fkData);
+ }
+ return fks.values();
+ }
+
+ /*
+ * Reads the next foreign key spec from the result set.
+ *
+ * @param metaData The database meta data
+ *
+ * @param values The foreign key meta data as defined by {@link
+ * #getColumnsForFK()}
+ *
+ * @param knownFks The already read foreign keys for the current table
+ */
+ protected void readForeignKey(DatabaseMetaDataWrapper metaData, Map values,
+ Map knownFks) throws SQLException {
+ String fkName = (String) values.get("FK_NAME");
+ ForeignKey fk = (ForeignKey) knownFks.get(fkName);
+
+ if (fk == null) {
+ fk = new ForeignKey(fkName);
+ fk.setForeignTableName((String) values.get("PKTABLE_NAME"));
+ knownFks.put(fkName, fk);
+ }
+
+ Reference ref = new Reference();
+
+ ref.setForeignColumnName((String) values.get("PKCOLUMN_NAME"));
+ ref.setLocalColumnName((String) values.get("FKCOLUMN_NAME"));
+ if (values.containsKey("KEY_SEQ")) {
+ ref.setSequenceValue(((Short) values.get("KEY_SEQ")).intValue());
+ }
+ fk.addReference(ref);
+ }
+
+ /*
+ * Determines the indices for the indicated table.
+ *
+ * @param metaData The database meta data
+ *
+ * @param tableName The name of the table
+ *
+ * @return The list of indices
+ */
+ protected Collection readIndices(Connection connection,
+ DatabaseMetaDataWrapper metaData, String tableName) throws SQLException {
+ Map indices = new LinkedHashMap();
+ ResultSet indexData = null;
+
+ try {
+ indexData = metaData.getIndices(getTableNamePattern(tableName), false, false);
+
+ while (indexData.next()) {
+ Map values = readColumns(indexData, getColumnsForIndex());
+
+ readIndex(metaData, values, indices);
+ }
+ } finally {
+ close(indexData);
+ }
+ return indices.values();
+ }
+
+ /*
+ * Reads the next index spec from the result set.
+ *
+ * @param metaData The database meta data
+ *
+ * @param values The index meta data as defined by {@link
+ * #getColumnsForIndex()}
+ *
+ * @param knownIndices The already read indices for the current table
+ */
+ protected void readIndex(DatabaseMetaDataWrapper metaData, Map values,
+ Map knownIndices) throws SQLException {
+ Short indexType = (Short) values.get("TYPE");
+
+ // we're ignoring statistic indices
+ if ((indexType != null) && (indexType.shortValue() == DatabaseMetaData.tableIndexStatistic)) {
+ return;
+ }
+
+ String indexName = (String) values.get("INDEX_NAME");
+
+ if (indexName != null) {
+ IIndex index = (IIndex) knownIndices.get(indexName);
+
+ if (index == null) {
+ if (((Boolean) values.get("NON_UNIQUE")).booleanValue()) {
+ index = new NonUniqueIndex();
+ } else {
+ index = new UniqueIndex();
+ }
+
+ index.setName(indexName);
+ knownIndices.put(indexName, index);
+ }
+
+ IndexColumn indexColumn = new IndexColumn();
+
+ indexColumn.setName((String) values.get("COLUMN_NAME"));
+ if (values.containsKey("ORDINAL_POSITION")) {
+ indexColumn.setOrdinalPosition(((Short) values.get("ORDINAL_POSITION")).intValue());
+ }
+ index.addColumn(indexColumn);
+ }
+ }
+
+ /*
+ * Reads the indicated columns from the result set.
+ *
+ * @param resultSet The result set
+ *
+ * @param columnDescriptors The dscriptors of the columns to read
+ *
+ * @return The read values keyed by the column name
+ */
+ protected Map readColumns(ResultSet resultSet,
+ List columnDescriptors) throws SQLException {
+ HashMap values = new HashMap();
+ for (Iterator it = columnDescriptors.iterator(); it.hasNext();) {
+ MetaDataColumnDescriptor descriptor = it.next();
+
+ values.put(descriptor.getName(), descriptor.readColumn(resultSet));
+ }
+ return values;
+ }
+
+ /*
+ * Convenience overload of
+ * {@link #determineAutoIncrementFromResultSetMetaData(Connection, Table, Column[], String)}
+ * that uses "." as the catalog separator.
+ */
+ protected void determineAutoIncrementFromResultSetMetaData(Connection conn, Table table,
+ final Column columnsToCheck[]) throws SQLException {
+ determineAutoIncrementFromResultSetMetaData(conn, table, columnsToCheck, ".");
+ }
+
+ /*
+ * Helper method that determines the auto increment status for the given
+ * columns via the {@link ResultSetMetaData#isAutoIncrement(int)} method.
+ *
+ * Fix problems following problems: 1) identifiers that use keywords 2)
+ * different catalog and schema 3) different catalog separator character *
+ *
+ * @param table The table
+ *
+ * @param columnsToCheck The columns to check (e.g. the primary key columns)
+ */
+ public void determineAutoIncrementFromResultSetMetaData(Connection conn, Table table,
+ final Column columnsToCheck[], String catalogSeparator) throws SQLException {
+ StringBuilder query = new StringBuilder();
+ try {
+ if (columnsToCheck == null || columnsToCheck.length == 0) {
+ return;
+ }
+ query.append("SELECT ");
+ for (int idx = 0; idx < columnsToCheck.length; idx++) {
+ if (idx > 0) {
+ query.append(",");
+ }
+ query.append("t.");
+ appendIdentifier(query, columnsToCheck[idx].getName());
+ }
+ query.append(" FROM ");
+
+ if (table.getCatalog() != null && !table.getCatalog().trim().equals("")) {
+ appendIdentifier(query, table.getCatalog());
+ query.append(catalogSeparator);
+ }
+ if (table.getSchema() != null && !table.getSchema().trim().equals("")) {
+ appendIdentifier(query, table.getSchema()).append(".");
+ }
+ appendIdentifier(query, table.getName()).append(" t WHERE 1 = 0");
+
+ Statement stmt = null;
+ try {
+ stmt = conn.createStatement();
+ ResultSet rs = stmt.executeQuery(query.toString());
+ ResultSetMetaData rsMetaData = rs.getMetaData();
+
+ for (int idx = 0; idx < columnsToCheck.length; idx++) {
+ if (rsMetaData.isAutoIncrement(idx + 1)) {
+ columnsToCheck[idx].setAutoIncrement(true);
+ }
+ }
+ } finally {
+ if (stmt != null) {
+ stmt.close();
+ }
+ }
+ } catch (SQLException ex) {
+ StringBuilder msg = new StringBuilder(
+ "Failed to determine auto increment columns using this query: '" + query
+ + "'. This is probably not harmful, but should be fixed. ");
+ msg.append("\n");
+ msg.append(table.toString());
+ if (columnsToCheck != null) {
+ for (Column col : columnsToCheck) {
+ msg.append("\n");
+ msg.append(col.toString());
+ }
+ }
+ log.warn(ex, msg.toString());
+ }
+ }
+
+ public StringBuilder appendIdentifier(StringBuilder query, String identifier) {
+ if (getPlatform().isDelimitedIdentifierModeOn()) {
+ query.append(getPlatformInfo().getDelimiterToken());
+ }
+ query.append(identifier);
+ if (getPlatform().isDelimitedIdentifierModeOn()) {
+ query.append(getPlatformInfo().getDelimiterToken());
+ }
+ return query;
+ }
+
+ /*
+ * Sorts the foreign keys in the tables of the model.
+ *
+ * @param model The model
+ */
+ protected void sortForeignKeys(Database model) {
+ for (int tableIdx = 0; tableIdx < model.getTableCount(); tableIdx++) {
+ model.getTable(tableIdx).sortForeignKeys(getPlatform().isDelimitedIdentifierModeOn());
+ }
+ }
+
+ /*
+ * Replaces a specific character sequence in the given text with the
+ * character sequence whose escaped version it is.
+ *
+ * @param text The text
+ *
+ * @param unescaped The unescaped string, e.g. "'"
+ *
+ * @param escaped The escaped version, e.g. "''"
+ *
+ * @return The resulting text
+ */
+ protected String unescape(String text, String unescaped, String escaped) {
+ String result = text;
+
+ // we need special handling if the single quote is escaped via a double
+ // single quote
+ if (result != null) {
+ if (escaped.equals("''")) {
+ if ((result.length() > 2) && result.startsWith("'") && result.endsWith("'")) {
+ result = "'"
+ + StringUtils.replace(result.substring(1, result.length() - 1),
+ escaped, unescaped) + "'";
+ } else {
+ result = StringUtils.replace(result, escaped, unescaped);
+ }
+ } else {
+ result = StringUtils.replace(result, escaped, unescaped);
+ }
+ }
+ return result;
+ }
+
+ /*
+ * Tries to find the schema to which the given table belongs.
+ *
+ * @param connection The database connection
+ *
+ * @param schemaPattern The schema pattern to limit the schemas to search in
+ *
+ * @param table The table to search for
+ *
+ * @return The schema name or null if the schema of the table
+ * could not be found
+ *
+ * @deprecated Will be removed once full schema support is in place
+ */
+ public String determineSchemaOf(Connection connection, String schemaPattern, Table table)
+ throws SQLException {
+ ResultSet tableData = null;
+ ResultSet columnData = null;
+
+ try {
+ DatabaseMetaDataWrapper metaData = new DatabaseMetaDataWrapper();
+
+ metaData.setMetaData(connection.getMetaData());
+ metaData.setCatalog(getDefaultCatalogPattern());
+ metaData.setSchemaPattern(schemaPattern == null ? getDefaultSchemaPattern()
+ : schemaPattern);
+ metaData.setTableTypes(getDefaultTableTypes());
+
+ String tablePattern = table.getName();
+
+ if (getPlatform().isDelimitedIdentifierModeOn()) {
+ tablePattern = tablePattern.toUpperCase();
+ }
+
+ tableData = metaData.getTables(tablePattern);
+
+ boolean found = false;
+ String schema = null;
+
+ while (!found && tableData.next()) {
+ Map values = readColumns(tableData, getColumnsForTable());
+ String tableName = (String) values.get("TABLE_NAME");
+
+ if ((tableName != null) && (tableName.length() > 0)) {
+ schema = (String) values.get("TABLE_SCHEM");
+ columnData = metaData.getColumns(tableName, getDefaultColumnPattern());
+ found = true;
+
+ while (found && columnData.next()) {
+ values = readColumns(columnData, getColumnsForColumn());
+
+ if (table.findColumn((String) values.get("COLUMN_NAME"), getPlatform()
+ .isDelimitedIdentifierModeOn()) == null) {
+ found = false;
+ }
+ }
+ columnData.close();
+ columnData = null;
+ }
+ }
+ return found ? schema : null;
+ } finally {
+ close(columnData);
+ close(tableData);
+ }
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/JdbcDatabasePlatformFactory.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/JdbcDatabasePlatformFactory.java
new file mode 100644
index 0000000000..ed23cd4862
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/JdbcDatabasePlatformFactory.java
@@ -0,0 +1,360 @@
+package org.jumpmind.db.platform;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.lang.reflect.Constructor;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.DdlException;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.db.platform.db2.Db2Platform;
+import org.jumpmind.db.platform.derby.DerbyPlatform;
+import org.jumpmind.db.platform.firebird.FirebirdPlatform;
+import org.jumpmind.db.platform.greenplum.GreenplumPlatform;
+import org.jumpmind.db.platform.h2.H2Platform;
+import org.jumpmind.db.platform.hsqldb.HsqlDbPlatform;
+import org.jumpmind.db.platform.hsqldb2.HsqlDb2Platform;
+import org.jumpmind.db.platform.informix.InformixPlatform;
+import org.jumpmind.db.platform.interbase.InterbasePlatform;
+import org.jumpmind.db.platform.mssql.MsSqlPlatform;
+import org.jumpmind.db.platform.mysql.MySqlPlatform;
+import org.jumpmind.db.platform.oracle.OraclePlatform;
+import org.jumpmind.db.platform.postgresql.PostgreSqlPlatform;
+import org.jumpmind.db.platform.sqlite.SqLitePlatform;
+import org.jumpmind.db.platform.sybase.SybasePlatform;
+import org.jumpmind.db.sql.SqlException;
+import org.jumpmind.log.Log;
+import org.jumpmind.log.LogFactory;
+
+/*
+ * A factory of {@link IDatabasePlatform} instances based on a case
+ * insensitive database name. Note that this is a convenience class as the platforms
+ * can also simply be created via their constructors.
+ */
+public class JdbcDatabasePlatformFactory {
+
+    /*
+     * The lower-cased database name (optionally suffixed with the major
+     * version) -> platform class map.
+     */
+    private static Map<String, Class<? extends IDatabasePlatform>> platforms = new HashMap<String, Class<? extends IDatabasePlatform>>();
+
+    /*
+     * Maps the sub-protocol part of a jdbc connection url to a platform
+     * name.
+     */
+    private static HashMap<String, String> jdbcSubProtocolToPlatform = new HashMap<String, String>();
+
+    static {
+
+        for (String name : H2Platform.DATABASENAMES) {
+            addPlatform(platforms, name, H2Platform.class);
+        }
+        addPlatform(platforms, SqLitePlatform.DATABASENAME, SqLitePlatform.class);
+        addPlatform(platforms, InformixPlatform.DATABASENAME, InformixPlatform.class);
+        addPlatform(platforms, DerbyPlatform.DATABASENAME, DerbyPlatform.class);
+        addPlatform(platforms, FirebirdPlatform.DATABASENAME, FirebirdPlatform.class);
+        addPlatform(platforms, GreenplumPlatform.DATABASENAME, GreenplumPlatform.class);
+        addPlatform(platforms, HsqlDbPlatform.DATABASENAME, HsqlDbPlatform.class);
+        addPlatform(platforms, HsqlDb2Platform.DATABASENAME, HsqlDb2Platform.class);
+        addPlatform(platforms, InterbasePlatform.DATABASENAME, InterbasePlatform.class);
+        addPlatform(platforms, MsSqlPlatform.DATABASENAME, MsSqlPlatform.class);
+        addPlatform(platforms, MySqlPlatform.DATABASENAME, MySqlPlatform.class);
+        addPlatform(platforms, OraclePlatform.DATABASENAME, OraclePlatform.class);
+        addPlatform(platforms, PostgreSqlPlatform.DATABASENAME, PostgreSqlPlatform.class);
+        addPlatform(platforms, SybasePlatform.DATABASENAME, SybasePlatform.class);
+        addPlatform(platforms, Db2Platform.DATABASENAME, Db2Platform.class);
+
+        // Note that currently Sapdb and MaxDB have equal subprotocols and
+        // drivers so we have no means to distinguish them
+        jdbcSubProtocolToPlatform.put(Db2Platform.JDBC_SUBPROTOCOL, Db2Platform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(DerbyPlatform.JDBC_SUBPROTOCOL, DerbyPlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(FirebirdPlatform.JDBC_SUBPROTOCOL,
+                FirebirdPlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(HsqlDbPlatform.JDBC_SUBPROTOCOL, HsqlDbPlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(InterbasePlatform.JDBC_SUBPROTOCOL,
+                InterbasePlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(MsSqlPlatform.JDBC_SUBPROTOCOL, MsSqlPlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(MySqlPlatform.JDBC_SUBPROTOCOL, MySqlPlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(OraclePlatform.JDBC_SUBPROTOCOL_THIN,
+                OraclePlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(OraclePlatform.JDBC_SUBPROTOCOL_OCI8,
+                OraclePlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(OraclePlatform.JDBC_SUBPROTOCOL_THIN_OLD,
+                OraclePlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(PostgreSqlPlatform.JDBC_SUBPROTOCOL,
+                PostgreSqlPlatform.DATABASENAME);
+        jdbcSubProtocolToPlatform.put(SybasePlatform.JDBC_SUBPROTOCOL, SybasePlatform.DATABASENAME);
+        // NOTE: a duplicate Firebird registration was removed here; the
+        // Firebird mapping above already covers it
+    }
+
+    /*
+     * Convenience overload that uses a default logger.
+     */
+    public static synchronized IDatabasePlatform createNewPlatformInstance(DataSource dataSource,
+            DatabasePlatformSettings settings) throws DdlException {
+        return createNewPlatformInstance(dataSource, settings, null);
+    }
+
+    /*
+     * Creates a new platform for the specified database. Note that this
+     * method installs the data source in the returned platform instance.
+     *
+     * @param dataSource The data source for the database
+     *
+     * @param log The logger that the platform should use (may be null, in
+     * which case a default logger is used)
+     *
+     * @return The platform
+     *
+     * @throws DdlException if the database is not supported or the platform
+     * could not be instantiated
+     */
+    public static synchronized IDatabasePlatform createNewPlatformInstance(DataSource dataSource,
+            DatabasePlatformSettings settings, Log log) throws DdlException {
+
+        if (log == null) {
+            log = LogFactory.getLog("org.jumpmind");
+        }
+
+        // connects to the database and uses actual metadata info to get db
+        // name and version to determine platform
+        String[] nameVersion = determineDatabaseNameVersionSubprotocol(dataSource);
+
+        Class<? extends IDatabasePlatform> clazz = findPlatformClass(nameVersion);
+
+        try {
+            // every platform is expected to expose this three-arg constructor
+            Constructor<? extends IDatabasePlatform> constructor = clazz.getConstructor(
+                    DataSource.class, DatabasePlatformSettings.class, Log.class);
+            return constructor.newInstance(dataSource, settings, log);
+        } catch (Exception e) {
+            throw new DdlException("Could not create a platform of type " + nameVersion[0], e);
+        }
+    }
+
+    /*
+     * Looks up the platform class for the given {name, major version,
+     * subprotocol} triple: first by name+version, then by name alone, and
+     * finally by the jdbc subprotocol.
+     */
+    protected static synchronized Class<? extends IDatabasePlatform> findPlatformClass(
+            String[] nameVersion) throws DdlException {
+        Class<? extends IDatabasePlatform> platformClass = platforms.get(String.format("%s%s",
+                nameVersion[0], nameVersion[1]).toLowerCase());
+        if (platformClass == null) {
+            platformClass = platforms.get(nameVersion[0].toLowerCase());
+        }
+
+        if (platformClass == null) {
+            String databaseName = jdbcSubProtocolToPlatform.get(nameVersion[2]);
+            if (databaseName != null) {
+                platformClass = platforms.get(databaseName.toLowerCase());
+            }
+        }
+
+        if (platformClass == null) {
+            throw new DdlException("Could not find platform for database " + nameVersion[0]);
+        } else {
+            return platformClass;
+        }
+
+    }
+
+    /*
+     * Returns {product name, major version, jdbc subprotocol} for the
+     * database behind the data source, treating PostgreSQL-reported servers
+     * that are actually Greenplum specially.
+     */
+    protected static String[] determineDatabaseNameVersionSubprotocol(DataSource dataSource) {
+        Connection connection = null;
+        String[] nameVersion = new String[3];
+        try {
+            connection = dataSource.getConnection();
+            DatabaseMetaData metaData = connection.getMetaData();
+            nameVersion[0] = metaData.getDatabaseProductName();
+            nameVersion[1] = Integer.toString(metaData.getDatabaseMajorVersion());
+            // extract the subprotocol, i.e. the part between "jdbc:" and the
+            // next ":" of the connection url
+            final String PREFIX = "jdbc:";
+            String url = metaData.getURL();
+            if (StringUtils.isNotBlank(url) && url.length() > PREFIX.length()) {
+                url = url.substring(PREFIX.length());
+                if (url.indexOf(":") > 0) {
+                    url = url.substring(0, url.indexOf(":"));
+                }
+            }
+            nameVersion[2] = url;
+
+            /*
+             * if the productName is PostgreSQL, it could be either PostgreSQL
+             * or Greenplum
+             */
+            /* query the metadata to determine which one it is */
+            if (nameVersion[0].equalsIgnoreCase(PostgreSqlPlatform.DATABASENAME)) {
+                if (isGreenplumDatabase(connection)) {
+                    nameVersion[0] = GreenplumPlatform.DATABASE;
+                    nameVersion[1] = Integer.toString(getGreenplumVersion(connection));
+                }
+            }
+
+            return nameVersion;
+        } catch (SQLException ex) {
+            throw new SqlException("Error while reading the database metadata: "
+                    + ex.getMessage(), ex);
+        } finally {
+            if (connection != null) {
+                try {
+                    connection.close();
+                } catch (SQLException ex) {
+                    // we ignore this one
+                }
+            }
+        }
+    }
+
+    /*
+     * Returns true when the (PostgreSQL-reporting) server is actually a
+     * Greenplum database. Any SQLException is taken to mean "not Greenplum".
+     */
+    private static boolean isGreenplumDatabase(Connection connection) {
+        Statement stmt = null;
+        ResultSet rs = null;
+        String productName = null;
+        boolean isGreenplum = false;
+        try {
+            stmt = connection.createStatement();
+            rs = stmt.executeQuery(GreenplumPlatform.SQL_GET_GREENPLUM_NAME);
+            while (rs.next()) {
+                productName = rs.getString(1);
+            }
+            if (productName != null && productName.equalsIgnoreCase(GreenplumPlatform.DATABASE)) {
+                isGreenplum = true;
+            }
+        } catch (SQLException ex) {
+            // ignore the exception, if it is caught, then this is most likely
+            // not a greenplum database
+        } finally {
+            try {
+                if (rs != null) {
+                    rs.close();
+                }
+                if (stmt != null) {
+                    stmt.close();
+                }
+            } catch (SQLException ex) {
+            }
+        }
+        return isGreenplum;
+    }
+
+    /*
+     * Returns the Greenplum major version, or 0 when it cannot be
+     * determined.
+     */
+    private static int getGreenplumVersion(Connection connection) {
+        Statement stmt = null;
+        ResultSet rs = null;
+        String versionName = null;
+        int productVersion = 0;
+        try {
+            stmt = connection.createStatement();
+            rs = stmt.executeQuery(GreenplumPlatform.SQL_GET_GREENPLUM_VERSION);
+            while (rs.next()) {
+                versionName = rs.getString(1);
+            }
+            // take up to the first "." for version number; guard against the
+            // query having returned no rows
+            if (versionName != null && versionName.indexOf('.') != -1) {
+                versionName = versionName.substring(0, versionName.indexOf('.'));
+            }
+            try {
+                productVersion = Integer.parseInt(versionName);
+            } catch (NumberFormatException ex) {
+                // if we can't convert this to a version number, leave it 0
+            }
+        } catch (SQLException ex) {
+            // ignore the exception, if it is caught, then this is most likely
+            // not a greenplum database
+        } finally {
+            try {
+                // null-safe cleanup: executeQuery may have thrown before rs
+                // was assigned
+                if (rs != null) {
+                    rs.close();
+                }
+                if (stmt != null) {
+                    stmt.close();
+                }
+            } catch (SQLException ex) {
+            }
+        }
+        return productVersion;
+    }
+
+    /*
+     * Returns the full product version string reported by the database.
+     */
+    public static String getDatabaseProductVersion(DataSource dataSource) {
+        Connection connection = null;
+
+        try {
+            connection = dataSource.getConnection();
+            DatabaseMetaData metaData = connection.getMetaData();
+            return metaData.getDatabaseProductVersion();
+        } catch (SQLException ex) {
+            throw new SqlException("Error while reading the database metadata: "
+                    + ex.getMessage(), ex);
+        } finally {
+            if (connection != null) {
+                try {
+                    connection.close();
+                } catch (SQLException ex) {
+                    // we ignore this one
+                }
+            }
+        }
+    }
+
+    /*
+     * Returns the major version number reported by the database.
+     */
+    public static int getDatabaseMajorVersion(DataSource dataSource) {
+        Connection connection = null;
+        try {
+            connection = dataSource.getConnection();
+            DatabaseMetaData metaData = connection.getMetaData();
+            return metaData.getDatabaseMajorVersion();
+        } catch (SQLException ex) {
+            throw new SqlException("Error while reading the database metadata: "
+                    + ex.getMessage(), ex);
+        } finally {
+            if (connection != null) {
+                try {
+                    connection.close();
+                } catch (SQLException ex) {
+                    // we ignore this one
+                }
+            }
+        }
+    }
+
+    /*
+     * Returns the minor version number reported by the database.
+     */
+    public static int getDatabaseMinorVersion(DataSource dataSource) {
+        Connection connection = null;
+        try {
+            connection = dataSource.getConnection();
+            DatabaseMetaData metaData = connection.getMetaData();
+            return metaData.getDatabaseMinorVersion();
+        } catch (SQLException ex) {
+            throw new SqlException("Error while reading the database metadata: "
+                    + ex.getMessage(), ex);
+        } finally {
+            if (connection != null) {
+                try {
+                    connection.close();
+                } catch (SQLException ex) {
+                    // we ignore this one
+                }
+            }
+        }
+    }
+
+    /*
+     * Registers a platform class under the given (case insensitive) name,
+     * rejecting classes that do not implement {@link IDatabasePlatform}.
+     */
+    private static synchronized void addPlatform(
+            Map<String, Class<? extends IDatabasePlatform>> platformMap, String platformName,
+            Class<? extends IDatabasePlatform> platformClass) {
+        if (!IDatabasePlatform.class.isAssignableFrom(platformClass)) {
+            throw new IllegalArgumentException("Cannot register class " + platformClass.getName()
+                    + " because it does not implement the " + IDatabasePlatform.class.getName()
+                    + " interface");
+        }
+        platformMap.put(platformName.toLowerCase(), platformClass);
+
+    }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2Builder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2Builder.java
new file mode 100644
index 0000000000..6d20257ec8
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2Builder.java
@@ -0,0 +1,210 @@
+package org.jumpmind.db.platform.db2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+import java.util.Iterator;
+import java.util.List;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.AddPrimaryKeyChange;
+import org.jumpmind.db.alter.PrimaryKeyChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.RemovePrimaryKeyChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.db.platform.PlatformUtils;
+import org.jumpmind.log.Log;
+
+/*
+ * The DDL Builder for DB2.
+ */
+public class Db2Builder extends AbstractDdlBuilder {
+
+ public Db2Builder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ addEscapedCharSequence("'", "''");
+ }
+
+ @Override
+ protected String getNativeDefaultValue(Column column) {
+ if ((column.getTypeCode() == Types.BIT)
+ || (PlatformUtils.supportsJava14JdbcTypes() && (column.getTypeCode() == PlatformUtils
+ .determineBooleanTypeCode()))) {
+ return getDefaultValueHelper().convert(column.getDefaultValue(), column.getTypeCode(),
+ Types.SMALLINT).toString();
+ } else {
+ return super.getNativeDefaultValue(column);
+ }
+ }
+
+ @Override
+ protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
+ ddl.append("GENERATED BY DEFAULT AS IDENTITY");
+ }
+
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ return "VALUES IDENTITY_VAL_LOCAL()";
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ // Index names in DB2 are unique to a schema and hence Derby does not
+ // use the ON clause
+ ddl.append("DROP INDEX ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ protected void writeCastExpression(Column sourceColumn, Column targetColumn, StringBuilder ddl) {
+ String sourceNativeType = getBareNativeType(sourceColumn);
+ String targetNativeType = getBareNativeType(targetColumn);
+
+ if (sourceNativeType.equals(targetNativeType)) {
+ printIdentifier(getColumnName(sourceColumn), ddl);
+ } else {
+ String type = getSqlType(targetColumn);
+
+ // DB2 has the limitation that it cannot convert numeric values
+ // to VARCHAR, though it can convert them to CHAR
+ if (TypeMap.isNumericType(sourceColumn.getTypeCode())
+ && "VARCHAR".equalsIgnoreCase(targetNativeType)) {
+ Object sizeSpec = targetColumn.getSize();
+
+ if (sizeSpec == null) {
+ sizeSpec = platform.getPlatformInfo()
+ .getDefaultSize(targetColumn.getTypeCode());
+ }
+ type = "CHAR(" + sizeSpec.toString() + ")";
+ }
+
+ ddl.append("CAST(");
+ printIdentifier(getColumnName(sourceColumn), ddl);
+ ddl.append(" AS ");
+ ddl.append(type);
+ ddl.append(")");
+ }
+ }
+
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // DB2 provides only limited ways to alter a column, so we don't use
+ // them
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ AddColumnChange addColumnChange = (AddColumnChange) change;
+
+ // DB2 can only add not insert columns
+ // Also, DB2 does not allow the GENERATED BY DEFAULT AS IDENTITY
+ // clause in
+ // the ALTER TABLE ADD COLUMN statement, so we have to rebuild
+ // the table instead
+ if ((addColumnChange.getNextColumn() == null)
+ && !addColumnChange.getNewColumn().isAutoIncrement()) {
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddPrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (AddPrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof PrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (PrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof RemovePrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (RemovePrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ /*
+ * Processes the addition of a column to a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD COLUMN ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP COLUMN ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a primary key from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemovePrimaryKeyChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP PRIMARY KEY");
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the change of the primary key of a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ PrimaryKeyChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP PRIMARY KEY");
+ printEndOfStatement(ddl);
+ writeExternalPrimaryKeysCreateStmt(change.getChangedTable(),
+ change.getNewPrimaryKeyColumns(), ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2DdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2DdlReader.java
new file mode 100644
index 0000000000..023dcc90f1
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2DdlReader.java
@@ -0,0 +1,194 @@
+package org.jumpmind.db.platform.db2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.HashSet;
+import java.util.Map;
+
+import org.apache.oro.text.regex.MalformedPatternException;
+import org.apache.oro.text.regex.Pattern;
+import org.apache.oro.text.regex.PatternCompiler;
+import org.apache.oro.text.regex.PatternMatcher;
+import org.apache.oro.text.regex.Perl5Compiler;
+import org.apache.oro.text.regex.Perl5Matcher;
+import org.jumpmind.db.DdlException;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a Db2 UDB database.
+ */
+public class Db2DdlReader extends AbstractJdbcDdlReader {
+ /* Known system tables that Db2 creates (e.g. automatic maintenance). */
+ private static final String[] KNOWN_SYSTEM_TABLES = { "STMG_DBSIZE_INFO", "HMON_ATM_INFO",
+ "HMON_COLLECTION", "POLICY" };
+
+ /* The regular expression pattern for the time values that Db2 returns. */
+ private Pattern _db2TimePattern;
+
+ /* The regular expression pattern for the timestamp values that Db2 returns. */
+ private Pattern _db2TimestampPattern;
+
+ public Db2DdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+
+ PatternCompiler compiler = new Perl5Compiler();
+
+ try {
+ _db2TimePattern = compiler.compile("'(\\d{2}).(\\d{2}).(\\d{2})'");
+ _db2TimestampPattern = compiler
+ .compile("'(\\d{4}\\-\\d{2}\\-\\d{2})\\-(\\d{2}).(\\d{2}).(\\d{2})(\\.\\d{1,8})?'");
+ } catch (MalformedPatternException ex) {
+ throw new DdlException(ex);
+ }
+ }
+
+ @Override
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+ Map values) throws SQLException {
+ String tableName = (String) values.get("TABLE_NAME");
+
+ for (int idx = 0; idx < KNOWN_SYSTEM_TABLES.length; idx++) {
+ if (KNOWN_SYSTEM_TABLES[idx].equals(tableName)) {
+ return null;
+ }
+ }
+
+ Table table = super.readTable(connection, metaData, values);
+
+ if (table != null) {
+ // Db2 does not return the auto-increment status via the database
+ // metadata
+ determineAutoIncrementFromResultSetMetaData(connection, table, table.getColumns());
+ }
+ return table;
+ }
+
+ @Override
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ Column column = super.readColumn(metaData, values);
+
+ if (column.getDefaultValue() != null) {
+ if (column.getTypeCode() == Types.TIME) {
+ PatternMatcher matcher = new Perl5Matcher();
+
+ // Db2 returns "HH24.MI.SS"
+ if (matcher.matches(column.getDefaultValue(), _db2TimePattern)) {
+ StringBuffer newDefault = new StringBuffer();
+
+ newDefault.append("'");
+ // the hour
+ newDefault.append(matcher.getMatch().group(1));
+ newDefault.append(":");
+ // the minute
+ newDefault.append(matcher.getMatch().group(2));
+ newDefault.append(":");
+ // the second
+ newDefault.append(matcher.getMatch().group(3));
+ newDefault.append("'");
+
+ column.setDefaultValue(newDefault.toString());
+ }
+ } else if (column.getTypeCode() == Types.TIMESTAMP) {
+ PatternMatcher matcher = new Perl5Matcher();
+
+ // Db2 returns "YYYY-MM-DD-HH24.MI.SS.FF"
+ if (matcher.matches(column.getDefaultValue(), _db2TimestampPattern)) {
+ StringBuffer newDefault = new StringBuffer();
+
+ newDefault.append("'");
+ // group 1 is the date which has the correct format
+ newDefault.append(matcher.getMatch().group(1));
+ newDefault.append(" ");
+ // the hour
+ newDefault.append(matcher.getMatch().group(2));
+ newDefault.append(":");
+ // the minute
+ newDefault.append(matcher.getMatch().group(3));
+ newDefault.append(":");
+ // the second
+ newDefault.append(matcher.getMatch().group(4));
+ // optionally, the fraction
+ if ((matcher.getMatch().groups() > 4) && (matcher.getMatch().group(4) != null)) {
+ newDefault.append(matcher.getMatch().group(5));
+ }
+ newDefault.append("'");
+
+ column.setDefaultValue(newDefault.toString());
+ }
+ } else if (TypeMap.isTextType(column.getTypeCode())) {
+ column.setDefaultValue(unescape(column.getDefaultValue(), "'", "''"));
+ }
+ }
+ return column;
+ }
+
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) throws SQLException {
+ // Db2 uses the form "SQL060205225246220" if the primary key was defined
+ // during table creation
+ // When the ALTER TABLE way was used however, the index has the name of
+ // the primary key
+ if (index.getName().startsWith("SQL")) {
+ try {
+ Long.parseLong(index.getName().substring(3));
+ return true;
+ } catch (NumberFormatException ex) {
+ // we ignore it
+ }
+ return false;
+ } else {
+ // we'll compare the index name to the names of all primary keys
+ // TODO: Once primary key names are supported, this can be done
+ // easier via the table object
+ ResultSet pkData = null;
+ HashSet pkNames = new HashSet();
+
+ try {
+ pkData = metaData.getPrimaryKeys(table.getName());
+ while (pkData.next()) {
+ Map values = readColumns(pkData, getColumnsForPK());
+
+ pkNames.add((String) values.get("PK_NAME"));
+ }
+ } finally {
+ if (pkData != null) {
+ pkData.close();
+ }
+ }
+
+ return pkNames.contains(index.getName());
+ }
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2Platform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2Platform.java
new file mode 100644
index 0000000000..d5168c28b2
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/Db2Platform.java
@@ -0,0 +1,104 @@
+package org.jumpmind.db.platform.db2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The DB2 platform implementation.
+ */
+public class Db2Platform extends AbstractJdbcDatabasePlatform {
+
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "DB2";
+
+ /* The standard DB2 jdbc driver. */
+ public static final String JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver";
+
+ /* The subprotocol used by the standard DB2 driver. */
+ public static final String JDBC_SUBPROTOCOL = "db2";
+
+ /*
+ * Creates a new platform instance, configuring DB2's type mappings,
+ * identifier limits and character/padding semantics.
+ */
+ public Db2Platform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ // map JDBC types DB2 lacks onto native types it does support;
+ // the BINARY types are also handled by Db2Builder.getSqlType(Column)
+ info.addNativeTypeMapping(Types.ARRAY, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.BINARY, "CHAR {0} FOR BIT DATA");
+ info.addNativeTypeMapping(Types.BIT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping(Types.FLOAT, "DOUBLE", Types.DOUBLE);
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.LONGVARBINARY, "LONG VARCHAR FOR BIT DATA");
+ info.addNativeTypeMapping(Types.LONGVARCHAR, "LONG VARCHAR");
+ info.addNativeTypeMapping(Types.NULL, "LONG VARCHAR FOR BIT DATA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.NUMERIC, "DECIMAL", Types.DECIMAL);
+ info.addNativeTypeMapping(Types.OTHER, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.STRUCT, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping(Types.VARBINARY, "VARCHAR {0} FOR BIT DATA");
+ info.addNativeTypeMapping("BOOLEAN", "SMALLINT", "SMALLINT");
+
+ // sizes used when a column declaration does not specify one
+ info.setDefaultSize(Types.CHAR, 254);
+ info.setDefaultSize(Types.VARCHAR, 254);
+ info.setDefaultSize(Types.BINARY, 254);
+ info.setDefaultSize(Types.VARBINARY, 254);
+ info.setStoresUpperCaseInCatalog(true);
+
+ info.setMaxIdentifierLength(128);
+ info.setMaxColumnNameLength(128);
+ info.setMaxConstraintNameLength(128);
+ info.setMaxForeignKeyNameLength(128);
+
+ // DB2 space-pads CHAR columns and does not treat '' as NULL
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(false);
+
+ // SQLCODE -803 = duplicate value for a unique/primary key constraint
+ primaryKeyViolationCodes = new int[] {-803};
+
+ ddlReader = new Db2DdlReader(log, this);
+ ddlBuilder = new Db2Builder(log, this);
+ }
+
+ public String getName() {
+ return DATABASENAME;
+ }
+
+ /*
+ * Returns the default schema, lazily resolved on first call by querying
+ * the CURRENT SCHEMA special register and cached thereafter.
+ */
+ public String getDefaultSchema() {
+ if (StringUtils.isBlank(defaultSchema)) {
+ defaultSchema = (String) getSqlTemplate().queryForObject("values CURRENT SCHEMA", String.class);
+ }
+ return defaultSchema;
+ }
+
+ /* DB2 has no catalog concept in this model, hence null. */
+ public String getDefaultCatalog() {
+ return null;
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/package.html b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/package.html
new file mode 100644
index 0000000000..2443cbd6ac
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/db2/package.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+ This package contains the platform implementation for the
+ DB2 UDB database.
+
+
+
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyBuilder.java
new file mode 100644
index 0000000000..ff304ea467
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyBuilder.java
@@ -0,0 +1,139 @@
+package org.jumpmind.db.platform.derby;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+import java.util.Iterator;
+import java.util.List;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.db.platform.PlatformUtils;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for Derby.
+ */
+public class DerbyBuilder extends AbstractDdlBuilder {
+
+ public DerbyBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ addEscapedCharSequence("'", "''");
+ }
+
+ @Override
+ protected String getNativeDefaultValue(Column column) {
+ if ((column.getTypeCode() == Types.BIT)
+ || (PlatformUtils.supportsJava14JdbcTypes() && (column.getTypeCode() == PlatformUtils
+ .determineBooleanTypeCode()))) {
+ return getDefaultValueHelper().convert(column.getDefaultValue(), column.getTypeCode(),
+ Types.SMALLINT).toString();
+ } else {
+ return super.getNativeDefaultValue(column);
+ }
+ }
+
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ return "VALUES IDENTITY_VAL_LOCAL()";
+ }
+
+ @Override
+ protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
+ ddl.append("GENERATED BY DEFAULT AS IDENTITY");
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ // Index names in Derby are unique to a schema and hence Derby does not
+ // use the ON clause
+ ddl.append("DROP INDEX ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ protected void writeCastExpression(Column sourceColumn, Column targetColumn, StringBuilder ddl) {
+ String sourceNativeType = getBareNativeType(sourceColumn);
+ String targetNativeType = getBareNativeType(targetColumn);
+
+ if (sourceNativeType.equals(targetNativeType)) {
+ printIdentifier(getColumnName(sourceColumn), ddl);
+ } else {
+ // Derby currently has the limitation that it cannot convert numeric
+ // values
+ // to VARCHAR, though it can convert them to CHAR
+ if (TypeMap.isNumericType(sourceColumn.getTypeCode())
+ && "VARCHAR".equalsIgnoreCase(targetNativeType)) {
+ targetNativeType = "CHAR";
+ }
+
+ ddl.append(targetNativeType);
+ ddl.append("(");
+ printIdentifier(getColumnName(sourceColumn), ddl);
+ ddl.append(")");
+ }
+ }
+
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // Derby provides a way to alter the size of a column but it is limited
+ // (no pk or fk columns, only for VARCHAR columns), so we don't use it
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ AddColumnChange addColumnChange = (AddColumnChange) change;
+
+ // Derby can only add not insert columns, and the columns
+ // cannot be identity columns
+ if (addColumnChange.isAtEnd() && !addColumnChange.getNewColumn().isAutoIncrement()) {
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+ super.processTableStructureChanges(currentModel, desiredModel, sourceTable, targetTable,
+ changes, ddl);
+ }
+
+ /*
+ * Processes the addition of a column to a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD COLUMN ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyDdlReader.java
new file mode 100644
index 0000000000..c39c1281b4
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyDdlReader.java
@@ -0,0 +1,100 @@
+package org.jumpmind.db.platform.derby;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a Derby database.
+ */
+public class DerbyDdlReader extends AbstractJdbcDdlReader {
+
+ public DerbyDdlReader(IDatabasePlatform platform, Log log) {
+ super(log, platform);
+ }
+
+ @Override
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values) throws SQLException {
+ Column column = super.readColumn(metaData, values);
+ String defaultValue = column.getDefaultValue();
+
+ if (defaultValue != null) {
+ // we check for these strings
+ // GENERATED_BY_DEFAULT -> 'GENERATED BY DEFAULT AS IDENTITY'
+ // AUTOINCREMENT: start 1 increment 1 -> 'GENERATED ALWAYS AS
+ // IDENTITY'
+ if ("GENERATED_BY_DEFAULT".equals(defaultValue)
+ || defaultValue.startsWith("AUTOINCREMENT:")) {
+ column.setDefaultValue(null);
+ column.setAutoIncrement(true);
+ } else if (TypeMap.isTextType(column.getTypeCode())) {
+ column.setDefaultValue(unescape(defaultValue, "'", "''"));
+ }
+ }
+ return column;
+ }
+
+ @Override
+ protected boolean isInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index) {
+ return isInternalIndex(index);
+ }
+
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) {
+ return isInternalIndex(index);
+ }
+
+ /*
+ * Determines whether the index is an internal index, i.e. one created by
+ * Derby.
+ *
+ * @param index The index to check
+ *
+ * @return true if the index seems to be an internal one
+ */
+ private boolean isInternalIndex(IIndex index) {
+ String name = index.getName();
+
+ // Internal names normally have the form "SQL051228005030780"
+ if ((name != null) && name.startsWith("SQL")) {
+ try {
+ Long.parseLong(name.substring(3));
+ return true;
+ } catch (NumberFormatException ex) {
+ // we ignore it
+ }
+ }
+ return false;
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyPlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyPlatform.java
new file mode 100644
index 0000000000..bd2cc54eed
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/DerbyPlatform.java
@@ -0,0 +1,107 @@
+package org.jumpmind.db.platform.derby;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for Derby.
+ */
+public class DerbyPlatform extends AbstractJdbcDatabasePlatform {
+
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "Apache Derby";
+
+ /* The derby jdbc driver for use as a client for a normal server. */
+ public static final String JDBC_DRIVER = "org.apache.derby.jdbc.ClientDriver";
+
+ /* The derby jdbc driver for use as an embedded database. */
+ public static final String JDBC_DRIVER_EMBEDDED = "org.apache.derby.jdbc.EmbeddedDriver";
+
+ /* The subprotocol used by the derby drivers. */
+ public static final String JDBC_SUBPROTOCOL = "derby";
+
+ /*
+ * Creates a new Derby platform instance, configuring Derby's type
+ * mappings, identifier limits and character/padding semantics.
+ */
+ public DerbyPlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.setMaxIdentifierLength(128);
+ info.setSystemForeignKeyIndicesAlwaysNonUnique(true);
+ // map JDBC types Derby lacks onto native types it does support
+ info.addNativeTypeMapping(Types.ARRAY, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.BINARY, "CHAR {0} FOR BIT DATA");
+ info.addNativeTypeMapping(Types.BIT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping(Types.DISTINCT, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.LONGVARBINARY, "LONG VARCHAR FOR BIT DATA");
+ info.addNativeTypeMapping(Types.LONGVARCHAR, "LONG VARCHAR");
+ info.addNativeTypeMapping(Types.NULL, "LONG VARCHAR FOR BIT DATA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.OTHER, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.REF, "LONG VARCHAR FOR BIT DATA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.STRUCT, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping(Types.VARBINARY, "VARCHAR {0} FOR BIT DATA");
+ info.addNativeTypeMapping("BOOLEAN", "SMALLINT", "SMALLINT");
+ info.addNativeTypeMapping("DATALINK", "LONG VARCHAR FOR BIT DATA", "LONGVARBINARY");
+
+ // sizes used when a column declaration does not specify one
+ info.setDefaultSize(Types.BINARY, 254);
+ info.setDefaultSize(Types.CHAR, 254);
+ info.setDefaultSize(Types.VARBINARY, 254);
+ info.setDefaultSize(Types.VARCHAR, 254);
+
+ info.addNativeTypeMapping(Types.DOUBLE, "DOUBLE");
+ info.addNativeTypeMapping(Types.FLOAT, "DOUBLE", Types.DOUBLE);
+ info.setStoresUpperCaseInCatalog(true);
+ // Derby space-pads CHAR columns and does not treat '' as NULL
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(false);
+
+ // SQLState 23505 = duplicate key value in a unique or primary key index
+ primaryKeyViolationSqlStates = new String[] {"23505"};
+
+ ddlReader = new DerbyDdlReader(this, log);
+ ddlBuilder = new DerbyBuilder(log, this);
+ }
+
+
+ public String getName() {
+ return DATABASENAME;
+ }
+
+ /*
+ * Returns the default schema, lazily resolved on first call by querying
+ * the CURRENT SCHEMA special register and cached thereafter.
+ */
+ public String getDefaultSchema() {
+ if (StringUtils.isBlank(defaultSchema)) {
+ defaultSchema = (String) getSqlTemplate().queryForObject("values CURRENT SCHEMA", String.class);
+ }
+ return defaultSchema;
+ }
+
+ /* Derby has no catalog concept in this model, hence null. */
+ public String getDefaultCatalog() {
+ return null;
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/package.html b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/package.html
new file mode 100644
index 0000000000..1bfbed1022
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/derby/package.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+ This package contains the platform implementation for the
+ Apache Derby database.
+
+
+
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdBuilder.java
new file mode 100644
index 0000000000..9e42c90089
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdBuilder.java
@@ -0,0 +1,314 @@
+package org.jumpmind.db.platform.firebird;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+import java.util.Iterator;
+import java.util.List;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.AddPrimaryKeyChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.db.platform.PlatformUtils;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for the FireBird database.
+ */
+public class FirebirdBuilder extends AbstractDdlBuilder {
+ public FirebirdBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ // Firebird escapes single quotes in literals by doubling them
+ addEscapedCharSequence("'", "''");
+ }
+
+ /*
+ * Creates the table and, since Firebird has no identity columns, a
+ * generator plus trigger pair for every auto-increment column.
+ */
+ @Override
+ public void createTable(Table table, StringBuilder ddl) {
+ super.createTable(table, ddl);
+
+ // creating generator and trigger for auto-increment
+ Column[] columns = table.getAutoIncrementColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ writeAutoIncrementCreateStmts(table, columns[idx], ddl);
+ }
+ }
+
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ // dropping generators for auto-increment
+ Column[] columns = table.getAutoIncrementColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ writeAutoIncrementDropStmts(table, columns[idx], ddl);
+ }
+ super.dropTable(table, ddl);
+ }
+
+ /*
+ * Writes the creation statements to make the given column an auto-increment
+ * column: a generator, plus a BEFORE INSERT trigger that fills the column
+ * from the generator whenever no value was supplied.
+ */
+ private void writeAutoIncrementCreateStmts(Table table, Column column, StringBuilder ddl)
+ {
+ ddl.append("CREATE GENERATOR ");
+ printIdentifier(getGeneratorName(table, column), ddl);
+ printEndOfStatement(ddl);
+
+ ddl.append("CREATE TRIGGER ");
+ printIdentifier(getTriggerName(table, column), ddl);
+ ddl.append(" FOR ");
+ printlnIdentifier(getTableName(table.getName()), ddl);
+ println("ACTIVE BEFORE INSERT POSITION 0 AS", ddl);
+ ddl.append("BEGIN IF (NEW.");
+ printIdentifier(getColumnName(column), ddl);
+ ddl.append(" IS NULL) THEN NEW.");
+ printIdentifier(getColumnName(column), ddl);
+ ddl.append(" = GEN_ID(");
+ printIdentifier(getGeneratorName(table, column), ddl);
+ ddl.append(", 1); END");
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Writes the statements to drop the auto-increment status for the given
+ * column.
+ *
+ * @param table The table
+ *
+ * @param column The column to remove the auto-increment status for
+ */
+ private void writeAutoIncrementDropStmts(Table table, Column column, StringBuilder ddl) {
+ ddl.append("DROP TRIGGER ");
+ printIdentifier(getTriggerName(table, column), ddl);
+ printEndOfStatement(ddl);
+
+ ddl.append("DROP GENERATOR ");
+ printIdentifier(getGeneratorName(table, column), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Determines the name of the trigger for an auto-increment column.
+ *
+ * @param table The table
+ *
+ * @param column The auto-increment column
+ *
+ * @return The trigger name
+ */
+ protected String getTriggerName(Table table, Column column) {
+ String secondPart = column.getName();
+ // make sure a backup table gets a different name than the original
+ if (table.getName().endsWith("_")) {
+ secondPart += "_";
+ }
+ return getConstraintName("trg", table, secondPart, null);
+ }
+
+ /*
+ * Determines the name of the generator for an auto-increment column.
+ *
+ * @param table The table
+ *
+ * @param column The auto-increment column
+ *
+ * @return The generator name
+ */
+ protected String getGeneratorName(Table table, Column column) {
+ String secondPart = column.getName();
+ // make sure a backup table gets a different name than the original
+ if (table.getName().endsWith("_")) {
+ secondPart += "_";
+ }
+ return getConstraintName("gen", table, secondPart, null);
+ }
+
+ @Override
+ protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
+ // we're using a generator instead of an inline identity clause
+ }
+
+ /*
+ * Builds a SELECT over RDB$DATABASE that reads the current value of each
+ * auto-increment column's generator, or null if the table has none.
+ */
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ Column[] columns = table.getAutoIncrementColumns();
+
+ if (columns.length == 0) {
+ return null;
+ } else {
+ StringBuilder result = new StringBuilder();
+
+ result.append("SELECT ");
+ for (int idx = 0; idx < columns.length; idx++) {
+ // separate the select list entries when the table has more
+ // than one auto-increment column
+ if (idx > 0) {
+ result.append(", ");
+ }
+ result.append("GEN_ID(");
+ result.append(getDelimitedIdentifier(getGeneratorName(table, columns[idx])));
+ result.append(", 0)");
+ }
+ result.append(" FROM RDB$DATABASE");
+ return result.toString();
+ }
+ }
+
+ /*
+ * Returns the native default value, converting BIT/BOOLEAN defaults to
+ * their SMALLINT representation.
+ */
+ @Override
+ protected String getNativeDefaultValue(Column column) {
+ if ((column.getTypeCode() == Types.BIT)
+ || (PlatformUtils.supportsJava14JdbcTypes() && (column.getTypeCode() == PlatformUtils
+ .determineBooleanTypeCode()))) {
+ return getDefaultValueHelper().convert(column.getDefaultValue(), column.getTypeCode(),
+ Types.SMALLINT).toString();
+ } else {
+ return super.getNativeDefaultValue(column);
+ }
+ }
+
+ @Override
+ public void createExternalForeignKeys(Database database, StringBuilder ddl) {
+ for (int idx = 0; idx < database.getTableCount(); idx++) {
+ createExternalForeignKeys(database, database.getTable(idx), ddl);
+ }
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ // Index names in Firebird are unique to a schema and hence Firebird
+ // does not use the ON clause
+ ddl.append("DROP INDEX ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Handles the additions and removals of non-primary-key columns via
+ * ALTER TABLE, then re-adds the primary key when possible; everything
+ * that remains in the change list falls back to the caller's default
+ * handling.
+ */
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // TODO: Dropping of primary keys is currently not supported because we
+ // cannot determine the pk constraint names and drop them in one go
+ // (We could used a stored procedure if Firebird would allow them to use
+ // DDL)
+ // This will be easier once named primary keys are supported
+ boolean pkColumnAdded = false;
+
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ // the raw iterator yields Object, so an explicit cast is required
+ TableChange change = (TableChange) changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ AddColumnChange addColumnChange = (AddColumnChange) change;
+
+ // TODO: we cannot add columns to the primary key this way
+ // because we would have to drop the pk first and then
+ // add a new one afterwards which is not supported yet
+ if (addColumnChange.getNewColumn().isPrimaryKey()) {
+ pkColumnAdded = true;
+ } else {
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+ } else if (change instanceof RemoveColumnChange) {
+ RemoveColumnChange removeColumnChange = (RemoveColumnChange) change;
+
+ // TODO: we cannot drop primary key columns this way
+ // because we would have to drop the pk first and then
+ // add a new one afterwards which is not supported yet
+ if (!removeColumnChange.getColumn().isPrimaryKey()) {
+ processChange(currentModel, desiredModel, removeColumnChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = (TableChange) changeIt.next();
+
+ // we can only add a primary key if all columns are present in the
+ // table, i.e. none was added during this alteration
+ if ((change instanceof AddPrimaryKeyChange) && !pkColumnAdded) {
+ processChange(currentModel, desiredModel, (AddPrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ /*
+ * Processes the addition of a column to a table, repositioning it after
+ * the requested predecessor and wiring up generator/trigger statements
+ * for auto-increment columns.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ printEndOfStatement(ddl);
+
+ Table curTable = currentModel.findTable(change.getChangedTable().getName(),
+ platform.isDelimitedIdentifierModeOn());
+
+ if (!change.isAtEnd()) {
+ Column prevColumn = change.getPreviousColumn();
+
+ if (prevColumn != null) {
+ // we need the corresponding column object from the current
+ // table
+ prevColumn = curTable.findColumn(prevColumn.getName(),
+ platform.isDelimitedIdentifierModeOn());
+ }
+ // Even though Firebird can only add columns, we can move them later
+ // on
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ALTER ");
+ printIdentifier(getColumnName(change.getNewColumn()), ddl);
+ ddl.append(" POSITION ");
+ // column positions start at 1 in Firebird
+ ddl.append(prevColumn == null ? "1" : String
+ .valueOf(curTable.getColumnIndex(prevColumn) + 2));
+ printEndOfStatement(ddl);
+ }
+ if (change.getNewColumn().isAutoIncrement()) {
+ writeAutoIncrementCreateStmts(curTable, change.getNewColumn(), ddl);
+ }
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table, dropping the
+ * generator/trigger pair first when the column was auto-increment.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ if (change.getColumn().isAutoIncrement()) {
+ writeAutoIncrementDropStmts(change.getChangedTable(), change.getColumn(), ddl);
+ }
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdDdlReader.java
new file mode 100644
index 0000000000..941ea6afb3
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdDdlReader.java
@@ -0,0 +1,431 @@
+package org.jumpmind.db.platform.firebird;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.collections.map.ListOrderedMap;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.IDdlBuilder;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * The Jdbc Model Reader for Firebird.
+ */
+public class FirebirdDdlReader extends AbstractJdbcDdlReader {
+
+    public FirebirdDdlReader(Log log, IDatabasePlatform platform) {
+        super(log, platform);
+        // No catalog or schema filtering; match every table by default.
+        setDefaultCatalogPattern(null);
+        setDefaultSchemaPattern(null);
+        setDefaultTablePattern("%");
+    }
+
+    /*
+     * Reads the table as usual and additionally flags columns that are backed
+     * by a Firebird generator as auto-increment.
+     */
+    @Override
+    protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData, Map values)
+            throws SQLException {
+        Table table = super.readTable(connection, metaData, values);
+
+        if (table != null) {
+            determineAutoIncrementColumns(connection, table);
+        }
+
+        return table;
+    }
+
+    @Override
+    protected Collection readColumns(DatabaseMetaDataWrapper metaData, String tableName)
+            throws SQLException {
+        ResultSet columnData = null;
+        try {
+            List columns = new ArrayList();
+
+            if (getPlatform().isDelimitedIdentifierModeOn()) {
+                // Jaybird has a problem when delimited identifiers are used as
+                // it is not able to find the columns for the table
+                // So we have to filter manually below
+                columnData = metaData.getColumns(getDefaultTablePattern(),
+                        getDefaultColumnPattern());
+
+                while (columnData.next()) {
+                    Map values = readColumns(columnData, getColumnsForColumn());
+
+                    if (tableName.equals(values.get("TABLE_NAME"))) {
+                        columns.add(readColumn(metaData, values));
+                    }
+                }
+            } else {
+                columnData = metaData.getColumns(tableName, getDefaultColumnPattern());
+
+                while (columnData.next()) {
+                    Map values = readColumns(columnData, getColumnsForColumn());
+
+                    // the table argument may act as a pattern, so filter on the
+                    // exact table name here as well
+                    if (tableName.equals(values.get("TABLE_NAME"))) {
+                        columns.add(readColumn(metaData, values));
+                    }
+                }
+            }
+
+            return columns;
+        } finally {
+            if (columnData != null) {
+                columnData.close();
+            }
+        }
+    }
+
+    @Override
+    protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values) throws SQLException {
+        Column column = super.readColumn(metaData, values);
+
+        if (column.getTypeCode() == Types.FLOAT) {
+            // the platform maps REAL to the native FLOAT type; map it back
+            column.setTypeCode(Types.REAL);
+        } else if (TypeMap.isTextType(column.getTypeCode())) {
+            // undo the quote escaping applied when the default value was written
+            column.setDefaultValue(unescape(column.getDefaultValue(), "'", "''"));
+        }
+        return column;
+    }
+
+    /*
+     * Helper method that determines the auto increment status using Firebird's
+     * system tables.
+     *
+     * @param table The table
+     */
+    protected void determineAutoIncrementColumns(Connection connection, Table table)
+            throws SQLException {
+        // Since for long table and column names, the generator name will be
+        // shortened we have to determine for each column whether there is a
+        // generator for it
+        Column[] columns = table.getColumns();
+        HashMap names = new HashMap();
+        String name;
+
+        for (int idx = 0; idx < columns.length; idx++) {
+            name = ((FirebirdBuilder) getPlatform().getDdlBuilder()).getGeneratorName(table,
+                    columns[idx]);
+            if (!getPlatform().isDelimitedIdentifierModeOn()) {
+                // non-delimited identifiers are matched in upper case
+                name = name.toUpperCase();
+            }
+            names.put(name, columns[idx]);
+        }
+
+        Statement stmt = connection.createStatement();
+
+        try {
+            ResultSet rs = stmt.executeQuery("SELECT RDB$GENERATOR_NAME FROM RDB$GENERATORS");
+
+            while (rs.next()) {
+                String generatorName = rs.getString(1).trim();
+                Column column = (Column) names.get(generatorName);
+
+                if (column != null) {
+                    column.setAutoIncrement(true);
+                }
+            }
+            rs.close();
+        } finally {
+            stmt.close();
+        }
+    }
+
+    @Override
+    protected Collection readPrimaryKeyNames(DatabaseMetaDataWrapper metaData, String tableName)
+            throws SQLException {
+        List pks = new ArrayList();
+        ResultSet pkData = null;
+
+        try {
+            if (getPlatform().isDelimitedIdentifierModeOn()) {
+                // Jaybird has a problem when delimited identifiers are used as
+                // it is not able to find the primary key info for the table
+                // So we have to filter manually below
+                pkData = metaData.getPrimaryKeys(getDefaultTablePattern());
+                while (pkData.next()) {
+                    Map values = readColumns(pkData, getColumnsForPK());
+
+                    if (tableName.equals(values.get("TABLE_NAME"))) {
+                        pks.add(readPrimaryKeyName(metaData, values));
+                    }
+                }
+            } else {
+                pkData = metaData.getPrimaryKeys(tableName);
+                while (pkData.next()) {
+                    Map values = readColumns(pkData, getColumnsForPK());
+
+                    if (tableName.equals(values.get("TABLE_NAME"))) {
+                        pks.add(readPrimaryKeyName(metaData, values));
+                    }
+                }
+            }
+        } finally {
+            if (pkData != null) {
+                pkData.close();
+            }
+        }
+        return pks;
+    }
+
+    @Override
+    protected Collection readForeignKeys(Connection connection, DatabaseMetaDataWrapper metaData,
+            String tableName) throws SQLException {
+        Map fks = new ListOrderedMap();
+        ResultSet fkData = null;
+
+        try {
+            if (getPlatform().isDelimitedIdentifierModeOn()) {
+                // Jaybird has a problem when delimited identifiers are used as
+                // it is not able to find the foreign key info for the table
+                // So we have to filter manually below
+                fkData = metaData.getForeignKeys(getDefaultTablePattern());
+                while (fkData.next()) {
+                    Map values = readColumns(fkData, getColumnsForFK());
+
+                    if (tableName.equals(values.get("FKTABLE_NAME"))) {
+                        readForeignKey(metaData, values, fks);
+                    }
+                }
+            } else {
+                fkData = metaData.getForeignKeys(tableName);
+                while (fkData.next()) {
+                    Map values = readColumns(fkData, getColumnsForFK());
+
+                    if (tableName.equals(values.get("FKTABLE_NAME"))) {
+                        readForeignKey(metaData, values, fks);
+                    }
+                }
+            }
+        } finally {
+            if (fkData != null) {
+                fkData.close();
+            }
+        }
+        return fks.values();
+    }
+
+    @Override
+    protected Collection readIndices(Connection connection, DatabaseMetaDataWrapper metaData,
+            String tableName) throws SQLException {
+        // Jaybird is not able to read indices when delimited identifiers are
+        // turned on, so we gather the data manually using Firebird's system
+        // tables
+        Map indices = new ListOrderedMap();
+        StringBuilder query = new StringBuilder();
+
+        query.append("SELECT a.RDB$INDEX_NAME INDEX_NAME, b.RDB$RELATION_NAME TABLE_NAME, b.RDB$UNIQUE_FLAG NON_UNIQUE,");
+        query.append(" a.RDB$FIELD_POSITION ORDINAL_POSITION, a.RDB$FIELD_NAME COLUMN_NAME, 3 INDEX_TYPE");
+        query.append(" FROM RDB$INDEX_SEGMENTS a, RDB$INDICES b WHERE a.RDB$INDEX_NAME=b.RDB$INDEX_NAME AND b.RDB$RELATION_NAME = ?");
+
+        PreparedStatement stmt = connection.prepareStatement(query.toString());
+        ResultSet indexData = null;
+
+        // parameter binding is done inside the try block so the statement is
+        // closed even if setString fails; the statement itself was previously
+        // never closed at all (resource leak)
+        try {
+            stmt.setString(1,
+                    getPlatform().isDelimitedIdentifierModeOn() ? tableName : tableName
+                            .toUpperCase());
+
+            indexData = stmt.executeQuery();
+
+            while (indexData.next()) {
+                Map values = readColumns(indexData, getColumnsForIndex());
+
+                // we have to reverse the meaning of the unique flag
+                values.put("NON_UNIQUE",
+                        Boolean.FALSE.equals(values.get("NON_UNIQUE")) ? Boolean.TRUE
+                                : Boolean.FALSE);
+                // and trim the names
+                values.put("INDEX_NAME", ((String) values.get("INDEX_NAME")).trim());
+                values.put("TABLE_NAME", ((String) values.get("TABLE_NAME")).trim());
+                values.put("COLUMN_NAME", ((String) values.get("COLUMN_NAME")).trim());
+                readIndex(metaData, values, indices);
+            }
+        } finally {
+            if (indexData != null) {
+                indexData.close();
+            }
+            stmt.close();
+        }
+        return indices.values();
+    }
+
+    /*
+     * An index is internal if RDB$RELATION_CONSTRAINTS records a PRIMARY KEY
+     * constraint backed by it.
+     */
+    @Override
+    protected boolean isInternalPrimaryKeyIndex(Connection connection,
+            DatabaseMetaDataWrapper metaData, Table table, IIndex index) throws SQLException {
+        IDdlBuilder builder = getPlatform().getDdlBuilder();
+        String tableName = builder.getTableName(table.getName());
+        String indexName = builder.getIndexName(index);
+        StringBuilder query = new StringBuilder();
+
+        query.append("SELECT RDB$CONSTRAINT_NAME FROM RDB$RELATION_CONSTRAINTS where RDB$RELATION_NAME=? AND RDB$CONSTRAINT_TYPE=? AND RDB$INDEX_NAME=?");
+
+        PreparedStatement stmt = connection.prepareStatement(query.toString());
+
+        try {
+            stmt.setString(
+                    1,
+                    getPlatform().isDelimitedIdentifierModeOn() ? tableName : tableName
+                            .toUpperCase());
+            stmt.setString(2, "PRIMARY KEY");
+            stmt.setString(3, indexName);
+
+            ResultSet resultSet = stmt.executeQuery();
+
+            return resultSet.next();
+        } finally {
+            if (stmt != null) {
+                stmt.close();
+            }
+        }
+    }
+
+    /*
+     * An index is internal if RDB$RELATION_CONSTRAINTS records a FOREIGN KEY
+     * constraint with the foreign key's name backed by it.
+     */
+    @Override
+    protected boolean isInternalForeignKeyIndex(Connection connection,
+            DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index)
+            throws SQLException {
+        IDdlBuilder builder = getPlatform().getDdlBuilder();
+        String tableName = builder.getTableName(table.getName());
+        String indexName = builder.getIndexName(index);
+        String fkName = builder.getForeignKeyName(table, fk);
+        StringBuilder query = new StringBuilder();
+
+        query.append("SELECT RDB$CONSTRAINT_NAME FROM RDB$RELATION_CONSTRAINTS where RDB$RELATION_NAME=? AND RDB$CONSTRAINT_TYPE=? AND RDB$CONSTRAINT_NAME=? AND RDB$INDEX_NAME=?");
+
+        PreparedStatement stmt = connection.prepareStatement(query.toString());
+
+        try {
+            stmt.setString(
+                    1,
+                    getPlatform().isDelimitedIdentifierModeOn() ? tableName : tableName
+                            .toUpperCase());
+            stmt.setString(2, "FOREIGN KEY");
+            stmt.setString(3, fkName);
+            stmt.setString(4, indexName);
+
+            ResultSet resultSet = stmt.executeQuery();
+
+            return resultSet.next();
+        } finally {
+            if (stmt != null) {
+                stmt.close();
+            }
+        }
+    }
+
+    /*
+     * Determines the schema containing the given table; all of the table's
+     * columns must be present in the candidate, otherwise it is rejected.
+     */
+    @Override
+    public String determineSchemaOf(Connection connection, String schemaPattern, Table table)
+            throws SQLException {
+        ResultSet tableData = null;
+        ResultSet columnData = null;
+
+        try {
+            DatabaseMetaDataWrapper metaData = new DatabaseMetaDataWrapper();
+
+            metaData.setMetaData(connection.getMetaData());
+            metaData.setCatalog(getDefaultCatalogPattern());
+            metaData.setSchemaPattern(schemaPattern == null ? getDefaultSchemaPattern()
+                    : schemaPattern);
+            metaData.setTableTypes(getDefaultTableTypes());
+
+            String tablePattern = table.getName();
+
+            if (getPlatform().isDelimitedIdentifierModeOn()) {
+                tablePattern = tablePattern.toUpperCase();
+            }
+
+            tableData = metaData.getTables(tablePattern);
+
+            boolean found = false;
+            String schema = null;
+
+            while (!found && tableData.next()) {
+                Map values = readColumns(tableData, getColumnsForTable());
+                String tableName = (String) values.get("TABLE_NAME");
+
+                if ((tableName != null) && (tableName.length() > 0)) {
+                    schema = (String) values.get("TABLE_SCHEM");
+                    found = true;
+
+                    if (getPlatform().isDelimitedIdentifierModeOn()) {
+                        // Jaybird has a problem when delimited identifiers are
+                        // used as it is not able to find the columns for the
+                        // table. So we have to filter manually below
+                        columnData = metaData.getColumns(getDefaultTablePattern(),
+                                getDefaultColumnPattern());
+                    } else {
+                        columnData = metaData.getColumns(tableName, getDefaultColumnPattern());
+                    }
+
+                    while (found && columnData.next()) {
+                        values = readColumns(columnData, getColumnsForColumn());
+
+                        if (getPlatform().isDelimitedIdentifierModeOn()
+                                && !tableName.equals(values.get("TABLE_NAME"))) {
+                            continue;
+                        }
+
+                        // reject the candidate if it has a column the given
+                        // table does not know about
+                        if (table.findColumn((String) values.get("COLUMN_NAME"), getPlatform()
+                                .isDelimitedIdentifierModeOn()) == null) {
+                            found = false;
+                        }
+                    }
+                    columnData.close();
+                    columnData = null;
+                }
+            }
+            return found ? schema : null;
+        } finally {
+            if (columnData != null) {
+                columnData.close();
+            }
+            if (tableData != null) {
+                tableData.close();
+            }
+        }
+    }
+
+    @Override
+    protected String getTableNamePattern(String tableName) {
+        /*
+         * When looking up a table definition, Jaybird treats underscore (_) in
+         * the table name as a wildcard, so it needs to be escaped, or you'll
+         * get back column names for more than one table. Example:
+         * DatabaseMetaData.metaData.getColumns(null, null, "SYM\\_NODE", null)
+         */
+        return tableName.replaceAll("\\_", "\\\\_");
+    }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdPlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdPlatform.java
new file mode 100644
index 0000000000..b557220bd8
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/FirebirdPlatform.java
@@ -0,0 +1,108 @@
+package org.jumpmind.db.platform.firebird;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.jumpmind.db.DatabasePlatformInfo;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for the Firebird database.
+ * It is assumed that the database is configured with sql dialect 3!
+ */
+public class FirebirdPlatform extends AbstractJdbcDatabasePlatform {
+    /* Database name of this platform. */
+    public static final String DATABASENAME = "Firebird";
+
+    /* The standard Firebird jdbc driver. */
+    public static final String JDBC_DRIVER = "org.firebirdsql.jdbc.FBDriver";
+
+    /* The subprotocol used by the standard Firebird driver. */
+    public static final String JDBC_SUBPROTOCOL = "firebirdsql";
+
+    /*
+     * Creates a new Firebird platform instance and configures the platform
+     * info, native type mappings and the Firebird-specific ddl reader/builder.
+     */
+    public FirebirdPlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+        super(dataSource, settings, log);
+
+        DatabasePlatformInfo info = getPlatformInfo();
+
+        // identifier length is capped at 31 characters for this platform
+        info.setMaxIdentifierLength(31);
+        info.setSystemForeignKeyIndicesAlwaysNonUnique(true);
+        info.setCommentPrefix("/*");
+        info.setCommentSuffix("*/");
+
+        // jdbc types without a direct native counterpart are stored as BLOBs
+        info.addNativeTypeMapping(Types.ARRAY, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.BINARY, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.BIT, "SMALLINT", Types.SMALLINT);
+        info.addNativeTypeMapping(Types.CLOB, "BLOB SUB_TYPE TEXT", Types.LONGVARCHAR);
+        info.addNativeTypeMapping(Types.DISTINCT, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.BLOB, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.DOUBLE, "DOUBLE PRECISION");
+        info.addNativeTypeMapping(Types.FLOAT, "DOUBLE PRECISION", Types.DOUBLE);
+        info.addNativeTypeMapping(Types.JAVA_OBJECT, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.LONGVARBINARY, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.LONGVARCHAR, "BLOB SUB_TYPE TEXT");
+        info.addNativeTypeMapping(Types.NULL, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.OTHER, "BLOB", Types.LONGVARBINARY);
+        // This is back-mapped to REAL in the model reader
+        info.addNativeTypeMapping(Types.REAL, "FLOAT");
+        info.addNativeTypeMapping(Types.REF, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.STRUCT, "BLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+        info.addNativeTypeMapping(Types.VARBINARY, "BLOB", Types.LONGVARBINARY);
+
+        info.addNativeTypeMapping("BOOLEAN", "SMALLINT", "SMALLINT");
+        info.addNativeTypeMapping("DATALINK", "BLOB", "LONGVARBINARY");
+
+        info.setDefaultSize(Types.VARCHAR, 254);
+        info.setDefaultSize(Types.CHAR, 254);
+
+        info.setStoresUpperCaseInCatalog(true);
+        info.setNonBlankCharColumnSpacePadded(true);
+        info.setBlankCharColumnSpacePadded(true);
+        info.setCharColumnSpaceTrimmed(false);
+        info.setEmptyStringNulled(false);
+
+        // NOTE(review): assumed to be Firebird's unique/primary key violation
+        // error code - confirm against the Jaybird error code tables
+        primaryKeyViolationCodes = new int [] {335544665};
+
+        ddlReader = new FirebirdDdlReader(log, this);
+        ddlBuilder = new FirebirdBuilder(log, this);
+    }
+
+    public String getName() {
+        return DATABASENAME;
+    }
+
+    /* No default catalog. */
+    public String getDefaultCatalog() {
+        return null;
+    }
+
+    /* No default schema. */
+    public String getDefaultSchema() {
+        return null;
+    }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/package.html b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/package.html
new file mode 100644
index 0000000000..303e9bb5fa
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/firebird/package.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+ This package contains the platform implementation for the
+ Firebird database.
+
+
+
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/greenplum/GreenplumDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/greenplum/GreenplumDdlReader.java
new file mode 100644
index 0000000000..7577c06c4c
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/greenplum/GreenplumDdlReader.java
@@ -0,0 +1,76 @@
+package org.jumpmind.db.platform.greenplum;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.db.platform.postgresql.PostgreSqlDdlReader;
+import org.jumpmind.log.Log;
+
+public class GreenplumDdlReader extends PostgreSqlDdlReader {
+
+    public GreenplumDdlReader(Log log, IDatabasePlatform platform) {
+        super(log, platform);
+    }
+
+    /*
+     * Marks the columns of the given table that participate in the Greenplum
+     * distribution policy by querying gp_distribution_policy.
+     *
+     * @param connection the live connection to query system tables with
+     * @param table the table whose columns are updated in place
+     * @param schema the schema the table lives in
+     */
+    protected void setDistributionKeys(Connection connection, Table table, String schema)
+            throws SQLException {
+
+        // get the distribution keys for segments
+        StringBuilder query = new StringBuilder();
+
+        query.append("select ");
+        query.append("   t.relname, ");
+        query.append("   a.attname ");
+        query.append("from ");
+        query.append("   pg_class t, ");
+        query.append("   pg_namespace n, ");
+        query.append("   pg_attribute a, ");
+        query.append("   gp_distribution_policy p ");
+        query.append("where ");
+        query.append("   n.oid = t.relnamespace and ");
+        query.append("   p.localoid = t.oid and ");
+        query.append("   a.attrelid = t.oid and ");
+        query.append("   a.attnum = any(p.attrnums) and ");
+        query.append("   n.nspname = ? and ");
+        query.append("   t.relname = ? ");
+
+        PreparedStatement prepStmt = connection.prepareStatement(query.toString());
+
+        try {
+            // set the schema parm in the query
+            prepStmt.setString(1, schema);
+            prepStmt.setString(2, table.getName());
+            ResultSet rs = prepStmt.executeQuery();
+
+            // for every row, set the distributionKey for the corresponding
+            // columns
+            while (rs.next()) {
+                Column column = table.findColumn(rs.getString(2).trim(), getPlatform()
+                        .isDelimitedIdentifierModeOn());
+                if (column != null) {
+                    column.setDistributionKey(true);
+                }
+            }
+            rs.close();
+        } finally {
+            if (prepStmt != null) {
+                prepStmt.close();
+            }
+        }
+    }
+
+    /*
+     * Reads the table via the PostgreSQL reader and enriches it with
+     * Greenplum distribution key information.
+     */
+    @Override
+    protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+            Map values) throws SQLException {
+        Table table = super.readTable(connection, metaData, values);
+        // the superclass may return null for filtered/unknown tables; guard
+        // before dereferencing it (previously caused a NullPointerException)
+        if (table != null) {
+            setDistributionKeys(connection, table, metaData.getSchemaPattern());
+        }
+        return table;
+    }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/greenplum/GreenplumPlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/greenplum/GreenplumPlatform.java
new file mode 100644
index 0000000000..15c9a01a95
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/greenplum/GreenplumPlatform.java
@@ -0,0 +1,25 @@
+package org.jumpmind.db.platform.greenplum;
+
+import javax.sql.DataSource;
+
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.db.platform.postgresql.PostgreSqlPlatform;
+import org.jumpmind.log.Log;
+
+public class GreenplumPlatform extends PostgreSqlPlatform {
+
+    /* Database name of this platform. */
+    public static final String DATABASE = "Greenplum";
+    public static final String DATABASENAME = "Greenplum4";
+
+    /* PostgreSql can be either PostgreSql or Greenplum. Metadata queries to determine which one */
+    public static final String SQL_GET_GREENPLUM_NAME = "select gpname from gp_id";
+    public static final String SQL_GET_GREENPLUM_VERSION = "select productversion from gp_version_at_initdb";
+
+    /*
+     * Creates a new Greenplum platform instance on top of the PostgreSQL
+     * platform configuration.
+     */
+    public GreenplumPlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+        super(dataSource, settings, log);
+        // this platform is configured without trigger support
+        info.setTriggersSupported(false);
+        // use the Greenplum-specific reader so distribution keys are picked up
+        this.ddlReader = new GreenplumDdlReader(log, this);
+    }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2Builder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2Builder.java
new file mode 100644
index 0000000000..f1c37098df
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2Builder.java
@@ -0,0 +1,134 @@
+/*
+ * SQL DDL builder for the H2 database.
+ */
+
+package org.jumpmind.db.platform.h2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.ModelException;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for the H2 database.
+ */
+public class H2Builder extends AbstractDdlBuilder {
+
+ public H2Builder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ addEscapedCharSequence("'", "''");
+ }
+
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD COLUMN ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ if (change.getNextColumn() != null) {
+ ddl.append(" BEFORE ");
+ printIdentifier(getColumnName(change.getNextColumn()), ddl);
+ }
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP COLUMN ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ @Override
+ protected void writeColumnDefaultValueStmt(Table table, Column column, StringBuilder ddl) {
+ Object parsedDefault = column.getParsedDefaultValue();
+
+ if (parsedDefault != null) {
+ if (!platform.getPlatformInfo().isDefaultValuesForLongTypesSupported()
+ && ((column.getTypeCode() == Types.LONGVARBINARY) || (column.getTypeCode() == Types.LONGVARCHAR))) {
+ throw new ModelException(
+ "The platform does not support default values for LONGVARCHAR or LONGVARBINARY columns");
+ }
+ // we write empty default value strings only if the type is not a
+ // numeric or date/time type
+ if (isValidDefaultValue(column.getDefaultValue(), column.getTypeCode())) {
+ ddl.append(" DEFAULT ");
+ writeColumnDefaultValue(table, column, ddl);
+ }
+ } else if (platform.getPlatformInfo().isDefaultValueUsedForIdentitySpec()
+ && column.isAutoIncrement()) {
+ ddl.append(" DEFAULT ");
+ writeColumnDefaultValue(table, column, ddl);
+ } else if (!StringUtils.isBlank(column.getDefaultValue())) {
+ ddl.append(" DEFAULT ");
+ writeColumnDefaultValue(table, column, ddl);
+ }
+ }
+
+ @Override
+ protected void printDefaultValue(Object defaultValue, int typeCode, StringBuilder ddl) {
+ if (defaultValue != null) {
+ String defaultValueStr = defaultValue.toString();
+ boolean shouldUseQuotes = !TypeMap.isNumericType(typeCode)
+ && !defaultValueStr.startsWith("TO_DATE(")
+ && !defaultValue.equals("CURRENT_TIMESTAMP")
+ && !defaultValue.equals("CURRENT_TIME") && !defaultValue.equals("CURRENT_DATE");
+ ;
+
+ if (shouldUseQuotes) {
+ // characters are only escaped when within a string literal
+ ddl.append(platform.getPlatformInfo().getValueQuoteToken());
+ ddl.append(escapeStringValue(defaultValueStr));
+ ddl.append(platform.getPlatformInfo().getValueQuoteToken());
+ } else {
+ ddl.append(defaultValueStr);
+ }
+ }
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ ddl.append("DROP INDEX IF EXISTS ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+}
\ No newline at end of file
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2DdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2DdlReader.java
new file mode 100644
index 0000000000..e7308629ac
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2DdlReader.java
@@ -0,0 +1,98 @@
+package org.jumpmind.db.platform.h2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.db.platform.MetaDataColumnDescriptor;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a H2 database. From patch https://issues.apache.org/jira/browse/DDLUTILS-185
+ */
+public class H2DdlReader extends AbstractJdbcDdlReader {
+
+    public H2DdlReader(Log log, IDatabasePlatform platform) {
+        super(log, platform);
+        setDefaultCatalogPattern(null);
+        setDefaultSchemaPattern(null);
+    }
+
+    /*
+     * Reads a column and refines its size, default value, scale and
+     * auto-increment status from the H2-specific meta data columns.
+     */
+    @Override
+    protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values)
+            throws SQLException {
+        Column column = super.readColumn(metaData, values);
+        if (values.get("CHARACTER_MAXIMUM_LENGTH") != null) {
+            column.setSize(values.get("CHARACTER_MAXIMUM_LENGTH").toString());
+        }
+        if (values.get("COLUMN_DEFAULT") != null) {
+            column.setDefaultValue(values.get("COLUMN_DEFAULT").toString());
+        }
+        if (values.get("NUMERIC_SCALE") != null) {
+            column.setScale((Integer) values.get("NUMERIC_SCALE"));
+        }
+        if (TypeMap.isTextType(column.getTypeCode()) && (column.getDefaultValue() != null)) {
+            // undo the quote escaping applied when the default was written
+            column.setDefaultValue(unescape(column.getDefaultValue(), "'", "''"));
+        }
+
+        String autoIncrement = (String) values.get("IS_AUTOINCREMENT");
+        if (autoIncrement != null) {
+            column.setAutoIncrement("YES".equalsIgnoreCase(autoIncrement.trim()));
+        }
+        return column;
+    }
+
+    /*
+     * Adds the H2-specific meta data columns. The jdbc type codes are
+     * 12 = java.sql.Types.VARCHAR and 4 = java.sql.Types.INTEGER.
+     */
+    @Override
+    protected List initColumnsForColumn() {
+        List result = super.initColumnsForColumn();
+        result.add(new MetaDataColumnDescriptor("COLUMN_DEFAULT", 12));
+        // Integer.valueOf replaces the deprecated boxing constructor
+        result.add(new MetaDataColumnDescriptor("NUMERIC_SCALE", 4, Integer.valueOf(0)));
+        result.add(new MetaDataColumnDescriptor("CHARACTER_MAXIMUM_LENGTH", 12));
+        return result;
+    }
+
+    /* Indices named CONSTRAINT_INDEX_* back foreign key constraints. */
+    @Override
+    protected boolean isInternalForeignKeyIndex(Connection connection,
+            DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index) {
+        String name = index.getName();
+        return name != null && name.startsWith("CONSTRAINT_INDEX_");
+    }
+
+    /* Indices named PRIMARY_KEY_* back primary keys. */
+    @Override
+    protected boolean isInternalPrimaryKeyIndex(Connection connection,
+            DatabaseMetaDataWrapper metaData, Table table, IIndex index) {
+        String name = index.getName();
+        return name != null && name.startsWith("PRIMARY_KEY_");
+    }
+
+}
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2Platform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2Platform.java
new file mode 100644
index 0000000000..3467b03a13
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/h2/H2Platform.java
@@ -0,0 +1,103 @@
+package org.jumpmind.db.platform.h2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for the H2 database.
+ */
+public class H2Platform extends AbstractJdbcDatabasePlatform implements IDatabasePlatform {
+
+ /* Database name of this platform. */
+ public static final String[] DATABASENAMES = { "H2", "H21" };
+
+ /* The standard H2 driver. */
+ public static final String JDBC_DRIVER = "org.h2.Driver";
+
+ /* The sub protocol used by the H2 driver. */
+ public static final String JDBC_SUBPROTOCOL = "h2";
+
+ /*
+ * Creates a new instance of the H2 platform, configuring the platform
+ * info with H2's capabilities, native type mappings and the H2-specific
+ * DDL reader/builder.
+ */
+ public H2Platform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.setNonPKIdentityColumnsSupported(false);
+ info.setIdentityOverrideAllowed(false);
+ info.setSystemForeignKeyIndicesAlwaysNonUnique(true);
+ info.setNullAsDefaultValueRequired(false);
+ // JDBC types H2 has no native equivalent for are stored as BINARY.
+ info.addNativeTypeMapping(Types.ARRAY, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.DISTINCT, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.NULL, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.REF, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.STRUCT, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.DATALINK, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.BIT, "BOOLEAN", Types.BIT);
+ info.addNativeTypeMapping(Types.NUMERIC, "DECIMAL", Types.DECIMAL);
+ info.addNativeTypeMapping(Types.BINARY, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.BLOB, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.CLOB, "CLOB", Types.CLOB);
+ info.addNativeTypeMapping(Types.LONGVARCHAR, "VARCHAR", Types.VARCHAR);
+ info.addNativeTypeMapping(Types.FLOAT, "DOUBLE", Types.DOUBLE);
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "OTHER");
+
+ // H2 character/binary columns are effectively unbounded by default.
+ info.setDefaultSize(Types.CHAR, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.VARCHAR, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.BINARY, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.VARBINARY, Integer.MAX_VALUE);
+
+ info.setStoresUpperCaseInCatalog(true);
+ info.setNonBlankCharColumnSpacePadded(false);
+ info.setBlankCharColumnSpacePadded(false);
+ info.setCharColumnSpaceTrimmed(true);
+ info.setEmptyStringNulled(false);
+
+ // SQLSTATE 23001 is H2's unique/primary key violation code.
+ primaryKeyViolationSqlStates = new String[] {"23001"};
+
+ ddlReader = new H2DdlReader(log, this);
+ ddlBuilder = new H2Builder(log, this);
+ }
+
+ public String getName() {
+ return DATABASENAMES[0];
+ }
+
+ /*
+ * Lazily resolves the session's current schema via SCHEMA() and caches
+ * it. NOTE(review): the lazy init is not synchronized — confirm this
+ * platform is only initialized from a single thread.
+ */
+ public String getDefaultSchema() {
+ if (StringUtils.isBlank(defaultSchema)) {
+ defaultSchema = (String) getSqlTemplate().queryForObject("select SCHEMA()", String.class);
+ }
+ return defaultSchema;
+ }
+
+ /* H2 does not use a catalog; always null. */
+ public String getDefaultCatalog() {
+ return null;
+ }
+
+}
\ No newline at end of file
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbBuilder.java
new file mode 100644
index 0000000000..e428bc59af
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbBuilder.java
@@ -0,0 +1,140 @@
+package org.jumpmind.db.platform.hsqldb;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ListIterator;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for the HsqlDb database.
+ */
+public class HsqlDbBuilder extends AbstractDdlBuilder {
+
+ public HsqlDbBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ // Single quotes inside string literals are escaped by doubling.
+ addEscapedCharSequence("'", "''");
+ }
+
+ /* HSQLDB supports DROP TABLE ... IF EXISTS, so use it to avoid errors. */
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ ddl.append("DROP TABLE ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ ddl.append(" IF EXISTS");
+ printEndOfStatement(ddl);
+ }
+
+ /* Returns the HSQLDB statement that yields the last IDENTITY value. */
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ return "CALL IDENTITY()";
+ }
+
+ /*
+ * Applies add-column and remove-column changes for HSQLDB. Add-column
+ * changes are applied in reverse target order so that the
+ * ALTER TABLE ... ADD COLUMN ... BEFORE clause inserts each column at
+ * the right position.
+ */
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // HsqlDb can only drop columns that are not part of a primary key
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ // NOTE(review): this bails out of the whole method, skipping ALL
+ // remaining changes, whereas HsqlDb2Builder only removes the one
+ // offending change — confirm this difference is intentional.
+ if ((change instanceof RemoveColumnChange)
+ && ((RemoveColumnChange) change).getColumn().isPrimaryKey()) {
+ return;
+ }
+ }
+
+ // in order to utilize the ALTER TABLE ADD COLUMN BEFORE statement
+ // we have to apply the add column changes in the correct order
+ // thus we first gather all add column changes and then execute them
+ // Since we get them in target table column order, we can simply
+ // iterate backwards
+ ArrayList addColumnChanges = new ArrayList();
+
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ addColumnChanges.add((AddColumnChange) change);
+ changeIt.remove();
+ }
+ }
+
+ for (ListIterator changeIt = addColumnChanges
+ .listIterator(addColumnChanges.size()); changeIt.hasPrevious();) {
+ AddColumnChange addColumnChange = (AddColumnChange) changeIt.previous();
+
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+
+ // remaining removals are applied after all additions
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof RemoveColumnChange) {
+ RemoveColumnChange removeColumnChange = (RemoveColumnChange) change;
+
+ processChange(currentModel, desiredModel, removeColumnChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ /*
+ * Processes the addition of a column to a table, using BEFORE to place
+ * the new column ahead of its successor when one exists.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD COLUMN ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ if (change.getNextColumn() != null) {
+ ddl.append(" BEFORE ");
+ printIdentifier(getColumnName(change.getNextColumn()), ddl);
+ }
+ printEndOfStatement(ddl);
+ // keep the in-memory model in sync with the DDL just emitted
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP COLUMN ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ // keep the in-memory model in sync with the DDL just emitted
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbDdlReader.java
new file mode 100644
index 0000000000..c5c70c493e
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbDdlReader.java
@@ -0,0 +1,92 @@
+package org.jumpmind.db.platform.hsqldb;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a HsqlDb database.
+ */
+public class HsqlDbDdlReader extends AbstractJdbcDdlReader {
+
+ public HsqlDbDdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ // HSQLDB metadata is read without catalog/schema filtering.
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ }
+
+ /*
+ * Reads a table, then fills in auto-increment flags that HSQLDB does
+ * not expose through DatabaseMetaData.
+ */
+ @Override
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+ Map values) throws SQLException {
+ Table table = super.readTable(connection, metaData, values);
+
+ if (table != null) {
+ // For at least version 1.7.2 we have to determine the
+ // auto-increment columns from a result set meta data because the
+ // database does not put this info into the database metadata
+ // Since Hsqldb only allows IDENTITY for primary key columns, we
+ // restrict our search to those columns
+ determineAutoIncrementFromResultSetMetaData(connection, table,
+ table.getPrimaryKeyColumns());
+ }
+
+ return table;
+ }
+
+ /*
+ * Reads a column, unescaping doubled single quotes in text defaults.
+ */
+ @Override
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ Column column = super.readColumn(metaData, values);
+
+ if (TypeMap.isTextType(column.getTypeCode()) && (column.getDefaultValue() != null)) {
+ column.setDefaultValue(unescape(column.getDefaultValue(), "'", "''"));
+ }
+ return column;
+ }
+
+ /* HSQLDB names its internally generated FK indices SYS_IDX_*. */
+ @Override
+ protected boolean isInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index) {
+ String name = index.getName();
+
+ return (name != null) && name.startsWith("SYS_IDX_");
+ }
+
+ /* HSQLDB names its internal PK indices SYS_PK_* or SYS_IDX_*. */
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) {
+ String name = index.getName();
+
+ return (name != null) && (name.startsWith("SYS_PK_") || name.startsWith("SYS_IDX_"));
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbPlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbPlatform.java
new file mode 100644
index 0000000000..581e8dff8f
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/HsqlDbPlatform.java
@@ -0,0 +1,99 @@
+package org.jumpmind.db.platform.hsqldb;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for the HsqlDb database.
+ */
+public class HsqlDbPlatform extends AbstractJdbcDatabasePlatform {
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "HsqlDb";
+
+ /* The standard Hsqldb jdbc driver. */
+ public static final String JDBC_DRIVER = "org.hsqldb.jdbcDriver";
+
+ /* The subprotocol used by the standard Hsqldb driver. */
+ public static final String JDBC_SUBPROTOCOL = "hsqldb";
+
+ /*
+ * Creates a new instance of the Hsqldb platform, configuring the
+ * platform info with HSQLDB's capabilities, native type mappings and
+ * the HSQLDB-specific DDL reader/builder.
+ */
+ public HsqlDbPlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.setNonPKIdentityColumnsSupported(false);
+ info.setIdentityOverrideAllowed(false);
+ info.setSystemForeignKeyIndicesAlwaysNonUnique(true);
+
+ // JDBC types HSQLDB has no native equivalent for are stored as
+ // LONGVARBINARY/LONGVARCHAR.
+ info.addNativeTypeMapping(Types.ARRAY, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.BLOB, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.CLOB, "LONGVARCHAR", Types.LONGVARCHAR);
+ info.addNativeTypeMapping(Types.DISTINCT, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.FLOAT, "DOUBLE", Types.DOUBLE);
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "OBJECT");
+ info.addNativeTypeMapping(Types.NULL, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.REF, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.STRUCT, "LONGVARBINARY", Types.LONGVARBINARY);
+ // JDBC's TINYINT allows a wider value range than HsqlDb's signed
+ // -128 to 127, so widen to SMALLINT to be safe
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+
+ info.addNativeTypeMapping("BIT", "BOOLEAN", "BOOLEAN");
+ info.addNativeTypeMapping("DATALINK", "LONGVARBINARY", "LONGVARBINARY");
+
+ // HSQLDB character/binary columns are effectively unbounded by default.
+ info.setDefaultSize(Types.CHAR, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.VARCHAR, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.BINARY, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.VARBINARY, Integer.MAX_VALUE);
+
+ info.setStoresUpperCaseInCatalog(true);
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(false);
+
+ // SQLSTATE 23505 is the standard unique violation code.
+ primaryKeyViolationSqlStates = new String[] {"23505"};
+
+ ddlReader = new HsqlDbDdlReader(log, this);
+ ddlBuilder = new HsqlDbBuilder(log, this);
+
+ }
+
+ public String getName() {
+ return DATABASENAME;
+ }
+
+ /* HSQLDB 1.x has no usable catalog; always null. */
+ public String getDefaultCatalog() {
+ return null;
+ }
+
+ /* HSQLDB 1.x has no usable schema; always null. */
+ public String getDefaultSchema() {
+ return null;
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/package.html b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/package.html
new file mode 100644
index 0000000000..10b7c3f2cc
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb/package.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+ This package contains the platform implementation for the
+ HSQLDB database.
+
+
+
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2Builder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2Builder.java
new file mode 100644
index 0000000000..1159f2a78f
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2Builder.java
@@ -0,0 +1,178 @@
+package org.jumpmind.db.platform.hsqldb2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ListIterator;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.ColumnDataTypeChange;
+import org.jumpmind.db.alter.ColumnSizeChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for the HsqlDb database.
+ */
+public class HsqlDb2Builder extends AbstractDdlBuilder {
+
+ public HsqlDb2Builder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ // Single quotes inside string literals are escaped by doubling.
+ addEscapedCharSequence("'", "''");
+ }
+
+ /* HSQLDB supports DROP TABLE ... IF EXISTS, so use it to avoid errors. */
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ ddl.append("DROP TABLE ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ ddl.append(" IF EXISTS");
+ printEndOfStatement(ddl);
+ }
+
+ /* Returns the HSQLDB statement that yields the last IDENTITY value. */
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ return "CALL IDENTITY()";
+ }
+
+ /*
+ * A single auto-increment PK column already gets its PRIMARY KEY from
+ * the IDENTITY clause, so no separate PK statement is needed then.
+ */
+ protected boolean shouldGeneratePrimaryKeys(Column[] primaryKeyColumns) {
+ if (primaryKeyColumns != null && primaryKeyColumns.length == 1) {
+ return !primaryKeyColumns[0].isAutoIncrement();
+ } else {
+ return true;
+ }
+ }
+
+ /*
+ * Applies add-column and remove-column changes for HSQLDB 2.x, first
+ * filtering out changes the database cannot perform or that are
+ * spurious artifacts of LONGVARCHAR metadata.
+ */
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ // HsqlDb can only drop columns that are not part of a primary key
+ if ((change instanceof RemoveColumnChange)
+ && ((RemoveColumnChange) change).getColumn().isPrimaryKey()) {
+ changeIt.remove();
+ }
+
+ // LONGVARCHAR columns always report changes
+ if (change instanceof ColumnSizeChange) {
+ ColumnSizeChange sizeChange = (ColumnSizeChange) change;
+ if (sizeChange.getChangedColumn().getTypeCode() == Types.VARCHAR
+ && sizeChange.getNewSize() == 0) {
+ changeIt.remove();
+ }
+ }
+
+ // LONGVARCHAR columns always report changes
+ if (change instanceof ColumnDataTypeChange) {
+ ColumnDataTypeChange dataTypeChange = (ColumnDataTypeChange) change;
+ if (dataTypeChange.getChangedColumn().getTypeCode() == Types.VARCHAR
+ && dataTypeChange.getNewTypeCode() == Types.LONGVARCHAR) {
+ changeIt.remove();
+ }
+ }
+ }
+
+ // in order to utilize the ALTER TABLE ADD COLUMN BEFORE statement
+ // we have to apply the add column changes in the correct order
+ // thus we first gather all add column changes and then execute them
+ // Since we get them in target table column order, we can simply
+ // iterate backwards
+ ArrayList addColumnChanges = new ArrayList();
+
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ addColumnChanges.add((AddColumnChange) change);
+ changeIt.remove();
+ }
+ }
+
+ for (ListIterator changeIt = addColumnChanges
+ .listIterator(addColumnChanges.size()); changeIt.hasPrevious();) {
+ AddColumnChange addColumnChange = (AddColumnChange) changeIt.previous();
+
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+
+ // remaining removals are applied after all additions
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof RemoveColumnChange) {
+ RemoveColumnChange removeColumnChange = (RemoveColumnChange) change;
+
+ processChange(currentModel, desiredModel, removeColumnChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ /*
+ * Processes the addition of a column to a table, using BEFORE to place
+ * the new column ahead of its successor when one exists.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD COLUMN ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ if (change.getNextColumn() != null) {
+ ddl.append(" BEFORE ");
+ printIdentifier(getColumnName(change.getNextColumn()), ddl);
+ }
+ printEndOfStatement(ddl);
+ // keep the in-memory model in sync with the DDL just emitted
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP COLUMN ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ // keep the in-memory model in sync with the DDL just emitted
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /* HSQLDB 2.x drops indices with a plain DROP INDEX (no table clause). */
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ ddl.append("DROP INDEX ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2DdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2DdlReader.java
new file mode 100644
index 0000000000..45fb945698
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2DdlReader.java
@@ -0,0 +1,79 @@
+package org.jumpmind.db.platform.hsqldb2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a HsqlDb database.
+ */
+public class HsqlDb2DdlReader extends AbstractJdbcDdlReader {
+
+ public HsqlDb2DdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ // HSQLDB metadata is read without catalog/schema filtering.
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ }
+
+ /*
+ * Reads a column, unescaping doubled single quotes in text defaults and
+ * picking up the JDBC 4 IS_AUTOINCREMENT flag when the driver reports it.
+ */
+ @Override
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ Column column = super.readColumn(metaData, values);
+
+ if (TypeMap.isTextType(column.getTypeCode()) && (column.getDefaultValue() != null)) {
+ column.setDefaultValue(unescape(column.getDefaultValue(), "'", "''"));
+ }
+
+ // JDBC 4 drivers report IS_AUTOINCREMENT as "YES"/"NO" (may be absent).
+ String autoIncrement = (String) values.get("IS_AUTOINCREMENT");
+ if (autoIncrement != null) {
+ column.setAutoIncrement("YES".equalsIgnoreCase(autoIncrement.trim()));
+ }
+ return column;
+ }
+
+ /* HSQLDB names its internally generated FK indices SYS_IDX_*. */
+ @Override
+ protected boolean isInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index) {
+ String name = index.getName();
+
+ return (name != null) && name.startsWith("SYS_IDX_");
+ }
+
+ /* HSQLDB names its internal PK indices SYS_PK_* or SYS_IDX_*. */
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) {
+ String name = index.getName();
+
+ return (name != null) && (name.startsWith("SYS_PK_") || name.startsWith("SYS_IDX_"));
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2Platform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2Platform.java
new file mode 100644
index 0000000000..09abf9abd7
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/hsqldb2/HsqlDb2Platform.java
@@ -0,0 +1,105 @@
+package org.jumpmind.db.platform.hsqldb2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for the HsqlDb database.
+ */
+public class HsqlDb2Platform extends AbstractJdbcDatabasePlatform {
+
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "HSQL Database Engine2";
+
+ /* The standard Hsqldb jdbc driver. */
+ public static final String JDBC_DRIVER = "org.hsqldb.jdbcDriver";
+
+ /* The subprotocol used by the standard Hsqldb driver. */
+ public static final String JDBC_SUBPROTOCOL = "hsqldb";
+
+ /*
+ * Creates a new instance of the Hsqldb platform, configuring the
+ * platform info with HSQLDB 2.x capabilities, native type mappings and
+ * the HSQLDB2-specific DDL reader/builder.
+ */
+ public HsqlDb2Platform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.setNonPKIdentityColumnsSupported(false);
+ info.setIdentityOverrideAllowed(false);
+ info.setSystemForeignKeyIndicesAlwaysNonUnique(true);
+
+ // JDBC types HSQLDB has no native equivalent for are stored as
+ // LONGVARBINARY/LONGVARCHAR.
+ info.addNativeTypeMapping(Types.ARRAY, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.BLOB, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.CLOB, "LONGVARCHAR", Types.LONGVARCHAR);
+ info.addNativeTypeMapping(Types.DISTINCT, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.FLOAT, "DOUBLE", Types.DOUBLE);
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "OBJECT");
+ info.addNativeTypeMapping(Types.NULL, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.REF, "LONGVARBINARY", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.STRUCT, "LONGVARBINARY", Types.LONGVARBINARY);
+ // JDBC's TINYINT allows a wider value range than HsqlDb's signed
+ // -128 to 127, so widen to SMALLINT to be safe
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+
+ info.addNativeTypeMapping("BIT", "BOOLEAN", "BOOLEAN");
+ info.addNativeTypeMapping("DATALINK", "LONGVARBINARY", "LONGVARBINARY");
+
+ // HSQLDB character/binary columns are effectively unbounded by default.
+ info.setDefaultSize(Types.CHAR, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.VARCHAR, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.BINARY, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.VARBINARY, Integer.MAX_VALUE);
+
+ info.setStoresUpperCaseInCatalog(true);
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(false);
+
+ // SQLSTATE 23505 is the standard unique violation code.
+ primaryKeyViolationSqlStates = new String[] {"23505"};
+
+ ddlReader = new HsqlDb2DdlReader(log, this);
+ ddlBuilder = new HsqlDb2Builder(log, this);
+ }
+
+ public String getName() {
+ return DATABASENAME;
+ }
+
+ public String getDefaultSchema() {
+ return null;
+ }
+
+ /*
+ * Lazily resolves and caches the default catalog from the session info
+ * table. NOTE(review): the query reads the 'CURRENT SCHEMA' key but the
+ * result is used as the catalog, and getDefaultSchema() returns null —
+ * confirm this schema/catalog swap is intentional for HSQLDB 2.x. The
+ * lazy init is also unsynchronized.
+ */
+ public String getDefaultCatalog() {
+ if (defaultCatalog == null) {
+ defaultCatalog = (String) getSqlTemplate()
+ .queryForObject(
+ "select value from INFORMATION_SCHEMA.SYSTEM_SESSIONINFO where key='CURRENT SCHEMA'",
+ String.class);
+ }
+ return defaultCatalog;
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixBuilder.java
new file mode 100644
index 0000000000..bb02dd5d8e
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixBuilder.java
@@ -0,0 +1,90 @@
+package org.jumpmind.db.platform.informix;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.PrimaryKeyChange;
+import org.jumpmind.db.alter.RemovePrimaryKeyChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.log.Log;
+
+/*
+ * DDL builder for Informix. Informix differs from the generic builder in
+ * two ways handled here: auto-increment columns are SERIAL, and constraint
+ * names are written AFTER the constraint body ("... CONSTRAINT name").
+ */
+public class InformixBuilder extends AbstractDdlBuilder {
+
+ public InformixBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ }
+
+ /* Auto-increment columns are emitted using Informix's SERIAL type. */
+ @Override
+ protected void writeColumn(Table table, Column column, StringBuilder ddl) {
+ if (column.isAutoIncrement()) {
+ printIdentifier(getColumnName(column), ddl);
+ ddl.append(" SERIAL");
+ } else {
+ super.writeColumn(table, column, ddl);
+ }
+ }
+
+ /* sqlca.sqlerrd1 holds the last SERIAL value generated in this session. */
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ return "select dbinfo('sqlca.sqlerrd1') from sysmaster:sysdual";
+ }
+
+ /*
+ * Informix syntax: the CONSTRAINT name clause follows the primary key
+ * definition instead of preceding it.
+ */
+ @Override
+ protected void writeExternalPrimaryKeysCreateStmt(Table table, Column primaryKeyColumns[],
+ StringBuilder ddl) {
+ if (primaryKeyColumns.length > 0 && shouldGeneratePrimaryKeys(primaryKeyColumns)) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(table.getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD CONSTRAINT ");
+ writePrimaryKeyStmt(table, primaryKeyColumns, ddl);
+ ddl.append(" CONSTRAINT ");
+ printIdentifier(getConstraintName(null, table, "PK", null), ddl);
+ printEndOfStatement(ddl);
+ }
+ }
+
+ /* Same trailing-CONSTRAINT-name syntax for foreign keys. */
+ protected void writeExternalForeignKeyCreateStmt(Database database, Table table,
+ ForeignKey key, StringBuilder ddl) {
+ if (key.getForeignTableName() == null) {
+ log.warn("Foreign key table is null for key " + key);
+ } else {
+ writeTableAlterStmt(table, ddl);
+ ddl.append("ADD CONSTRAINT FOREIGN KEY (");
+ writeLocalReferences(key, ddl);
+ ddl.append(") REFERENCES ");
+ printIdentifier(getTableName(key.getForeignTableName()), ddl);
+ ddl.append(" (");
+ writeForeignReferences(key, ddl);
+ ddl.append(") CONSTRAINT ");
+ printIdentifier(getForeignKeyName(table, key), ddl);
+ printEndOfStatement(ddl);
+ }
+ }
+
+ /* Drops the named PK constraint, then applies the change to the model. */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemovePrimaryKeyChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP CONSTRAINT ");
+ printIdentifier(getConstraintName(null, change.getChangedTable(), "PK", null), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /* A PK change is a drop of the old constraint followed by a re-create. */
+ protected void processChange(Database currentModel, Database desiredModel,
+ PrimaryKeyChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP CONSTRAINT ");
+ printIdentifier(getConstraintName(null, change.getChangedTable(), "PK", null), ddl);
+ printEndOfStatement(ddl);
+ writeExternalPrimaryKeysCreateStmt(change.getChangedTable(),
+ change.getNewPrimaryKeyColumns(), ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixDdlReader.java
new file mode 100644
index 0000000000..fd74a69ba2
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixDdlReader.java
@@ -0,0 +1,103 @@
+package org.jumpmind.db.platform.informix;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.commons.collections.map.ListOrderedMap;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads table, column and index metadata back from an Informix database.
+ * Index metadata is read directly from the sysindexes/systables/syscolumns
+ * system catalog rather than via the JDBC driver.
+ */
+public class InformixDdlReader extends AbstractJdbcDdlReader {
+
+ public InformixDdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ // Informix metadata lookups do not use catalog or schema patterns
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ }
+
+ /* Reads a table and additionally resolves SERIAL (auto-increment) columns. */
+ @Override
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+ Map values) throws SQLException {
+ Table table = super.readTable(connection, metaData, values);
+ if (table != null) {
+ determineAutoIncrementFromResultSetMetaData(connection, table, table.getColumns());
+ }
+ return table;
+ }
+
+ /* Informix qualifies tables as database:table, hence the ':' separator. */
+ @Override
+ protected void determineAutoIncrementFromResultSetMetaData(Connection connection, Table table,
+ Column[] columnsToCheck) throws SQLException {
+ determineAutoIncrementFromResultSetMetaData(connection, table, columnsToCheck, ":");
+ }
+
+ /*
+ * Reads index metadata for the given table from the Informix system
+ * catalog. The CASE expression maps each matched column to its 1-based
+ * ordinal position within the (at most 8-part) index.
+ * Fix vs. original: the duplicated "when sc.colno = si.part1 then 1"
+ * branch has been removed, and the statement/result set are now closed
+ * even when reading a row throws.
+ */
+ @Override
+ public Collection readIndices(Connection connection, DatabaseMetaDataWrapper metaData,
+ String tableName) throws SQLException {
+ String sql = "select rtrim(dbinfo('dbname')) as TABLE_CAT, st.owner as TABLE_SCHEM, st.tabname as TABLE_NAME, "
+ + "case when idxtype = 'U' then 0 else 1 end NON_UNIQUE, si.owner as INDEX_QUALIFIER, si.idxname as INDEX_NAME, "
+ + "3 as TYPE, "
+ + "case when sc.colno = si.part1 then 1 "
+ + "when sc.colno = si.part2 then 2 "
+ + "when sc.colno = si.part3 then 3 "
+ + "when sc.colno = si.part4 then 4 "
+ + "when sc.colno = si.part5 then 5 "
+ + "when sc.colno = si.part6 then 6 "
+ + "when sc.colno = si.part7 then 7 "
+ + "when sc.colno = si.part8 then 8 "
+ + "else 0 end as ORDINAL_POSITION, "
+ + "sc.colname as COLUMN_NAME, "
+ + "null::varchar as ASC_OR_DESC, 0 as CARDINALITY, 0 as PAGES, null::varchar as FILTER_CONDITION "
+ + "from sysindexes si "
+ + "inner join systables st on si.tabid = st.tabid "
+ + "inner join syscolumns sc on si.tabid = sc.tabid "
+ + "where st.tabname like ? "
+ + "and (sc.colno = si.part1 or sc.colno = si.part2 or sc.colno = si.part3 or "
+ + "sc.colno = si.part4 or sc.colno = si.part5 or sc.colno = si.part6 or "
+ + "sc.colno = si.part7 or sc.colno = si.part8)";
+ PreparedStatement ps = connection.prepareStatement(sql);
+ try {
+ ps.setString(1, tableName);
+ ResultSet rs = ps.executeQuery();
+ try {
+ Map indices = new ListOrderedMap();
+ while (rs.next()) {
+ Map values = readColumns(rs, getColumnsForIndex());
+ readIndex(metaData, values, indices);
+ }
+ return indices.values();
+ } finally {
+ rs.close();
+ }
+ } finally {
+ ps.close();
+ }
+ }
+
+ /* Kept for symmetry with other readers; delegates to the default behavior. */
+ @Override
+ public void removeSystemIndices(Connection connection, DatabaseMetaDataWrapper metaData,
+ Table table) throws SQLException {
+ super.removeSystemIndices(connection, metaData, table);
+ }
+
+ /* Informix system-generated primary key index names begin with a space. */
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) throws SQLException {
+ return index.getName().startsWith(" ");
+ }
+
+ /* Likewise, system-generated foreign key names begin with a space. */
+ @Override
+ protected boolean isInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index1)
+ throws SQLException {
+ return fk.getName().startsWith(" ");
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixPlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixPlatform.java
new file mode 100644
index 0000000000..6325ce3cb7
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/informix/InformixPlatform.java
@@ -0,0 +1,79 @@
+package org.jumpmind.db.platform.informix;
+
+import java.sql.Types;
+import java.util.Map;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * Platform definition for Informix Dynamic Server 11: native type mappings,
+ * identifier quoting, and error-code configuration.
+ */
+public class InformixPlatform extends AbstractJdbcDatabasePlatform implements IDatabasePlatform {
+
+ public static final String DATABASENAME = "Informix Dynamic Server11";
+
+ public static final String JDBC_DRIVER = "com.informix.jdbc.IfxDriver";
+
+ public static final String JDBC_SUBPROTOCOL = "informix-sqli";
+
+ public InformixPlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.addNativeTypeMapping(Types.VARCHAR, "VARCHAR", Types.VARCHAR);
+ info.addNativeTypeMapping(Types.LONGVARCHAR, "LVARCHAR", Types.LONGVARCHAR);
+ info.addNativeTypeMapping(Types.LONGVARBINARY, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.TIMESTAMP, "DATETIME YEAR TO FRACTION", Types.TIMESTAMP);
+ // NOTE(review): TIME is widened to a full DATETIME and read back as
+ // TIMESTAMP -- presumably because Informix lacks a plain TIME; confirm.
+ info.addNativeTypeMapping(Types.TIME, "DATETIME YEAR TO FRACTION", Types.TIMESTAMP);
+ info.addNativeTypeMapping(Types.BINARY, "BYTE", Types.BINARY);
+ info.addNativeTypeMapping(Types.VARBINARY, "BYTE", Types.BINARY);
+
+ info.addNativeTypeMapping(Types.BIT, "BOOLEAN", Types.BOOLEAN);
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping(Types.DOUBLE, "FLOAT", Types.DOUBLE);
+
+ info.setDefaultSize(Types.VARCHAR, 255);
+ info.setDefaultSize(Types.CHAR, 255);
+
+ info.setAlterTableForDropUsed(true);
+ info.setSystemIndicesReturned(true);
+
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(false);
+ info.setAutoIncrementUpdateAllowed(false);
+
+ // Informix only honors quoted (delimited) identifiers when the client
+ // environment sets DELIMIDENT=y, so quoting is enabled conditionally.
+ Map env = System.getenv();
+ String clientIdentifierMode = env.get("DELIMIDENT");
+ if (clientIdentifierMode != null && clientIdentifierMode.equalsIgnoreCase("y")) {
+ info.setIdentifierQuoteString("\"");
+ }
+
+ // -268: Informix unique-constraint violation error code
+ primaryKeyViolationCodes = new int[] {-268};
+
+ ddlReader = new InformixDdlReader(log, this);
+ ddlBuilder = new InformixBuilder(log, this);
+ }
+
+ public String getName() {
+ return DATABASENAME;
+ }
+
+
+ /* Informix metadata access here is schema-based; no default catalog. */
+ public String getDefaultCatalog() {
+ return null;
+ }
+
+ /* Lazily resolves the default schema as the current user. */
+ public String getDefaultSchema() {
+ if (StringUtils.isBlank(defaultSchema)) {
+ defaultSchema = getSqlTemplate().queryForObject("select trim(user) from sysmaster:sysdual",
+ String.class);
+ }
+ return defaultSchema;
+ }
+
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbaseBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbaseBuilder.java
new file mode 100644
index 0000000000..761fdfa73b
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbaseBuilder.java
@@ -0,0 +1,324 @@
+package org.jumpmind.db.platform.interbase;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+import java.util.Iterator;
+import java.util.List;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.AddPrimaryKeyChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.db.platform.PlatformUtils;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for the Interbase database.
+ */
+public class InterbaseBuilder extends AbstractDdlBuilder {
+
+ public InterbaseBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ addEscapedCharSequence("'", "''");
+ }
+
+ @Override
+ public void createTable(Table table, StringBuilder ddl) {
+ super.createTable(table, ddl);
+
+ // creating generator and trigger for auto-increment
+ Column[] columns = table.getAutoIncrementColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ writeAutoIncrementCreateStmts(table, columns[idx], ddl);
+ }
+ }
+
+ @Override
+ protected String getNativeDefaultValue(Column column) {
+ if ((column.getTypeCode() == Types.BIT)
+ || (PlatformUtils.supportsJava14JdbcTypes() && (column.getTypeCode() == PlatformUtils
+ .determineBooleanTypeCode()))) {
+ return getDefaultValueHelper().convert(column.getDefaultValue(), column.getTypeCode(),
+ Types.SMALLINT).toString();
+ } else {
+ return super.getNativeDefaultValue(column);
+ }
+ }
+
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ // dropping generators for auto-increment
+ Column[] columns = table.getAutoIncrementColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ writeAutoIncrementDropStmts(table, columns[idx], ddl);
+ }
+ super.dropTable(table, ddl);
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ // Index names in Interbase are unique to a schema and hence we do not
+ // need the ON clause
+ ddl.append("DROP INDEX ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Writes the creation statements to make the given column an auto-increment
+ * column.
+ */
+ private void writeAutoIncrementCreateStmts(Table table, Column column,
+ StringBuilder ddl) {
+ ddl.append("CREATE GENERATOR ");
+ printIdentifier(getGeneratorName(table, column), ddl);
+ printEndOfStatement(ddl);
+
+ ddl.append("CREATE TRIGGER ");
+ printIdentifier(getTriggerName(table, column), ddl);
+ ddl.append(" FOR ");
+ printlnIdentifier(getTableName(table.getName()), ddl);
+ println("ACTIVE BEFORE INSERT POSITION 0 AS", ddl);
+ ddl.append("BEGIN IF (NEW.");
+ printIdentifier(getColumnName(column), ddl);
+ ddl.append(" IS NULL) THEN NEW.");
+ printIdentifier(getColumnName(column), ddl);
+ ddl.append(" = GEN_ID(");
+ printIdentifier(getGeneratorName(table, column), ddl);
+ ddl.append(", 1); END");
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Writes the statements to drop the auto-increment status for the given
+ * column.
+ */
+ private void writeAutoIncrementDropStmts(Table table, Column column, StringBuilder ddl) {
+ ddl.append("DROP TRIGGER ");
+ printIdentifier(getTriggerName(table, column), ddl);
+ printEndOfStatement(ddl);
+
+ ddl.append("DROP GENERATOR ");
+ printIdentifier(getGeneratorName(table, column), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Determines the name of the trigger for an auto-increment column.
+ *
+ * @param table The table
+ *
+ * @param column The auto-increment column
+ *
+ * @return The trigger name
+ */
+ protected String getTriggerName(Table table, Column column) {
+ String secondPart = column.getName();
+ // make sure a backup table gets a different name than the original
+ if (table.getName().endsWith("_")) {
+ secondPart += "_";
+ }
+ return getConstraintName("trg", table, secondPart, null);
+ }
+
+ /*
+ * Determines the name of the generator for an auto-increment column.
+ *
+ * @param table The table
+ *
+ * @param column The auto-increment column
+ *
+ * @return The generator name
+ */
+ protected String getGeneratorName(Table table, Column column) {
+ String secondPart = column.getName();
+ // make sure a backup table gets a different name than the original
+ if (table.getName().endsWith("_")) {
+ secondPart += "_";
+ }
+ return getConstraintName("gen", table, secondPart, null);
+ }
+
+ @Override
+ protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
+ // we're using a generator
+ }
+
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ Column[] columns = table.getAutoIncrementColumns();
+
+ if (columns.length == 0) {
+ return null;
+ } else {
+ StringBuffer result = new StringBuffer();
+
+ result.append("SELECT ");
+ for (int idx = 0; idx < columns.length; idx++) {
+ result.append("GEN_ID(");
+ result.append(getDelimitedIdentifier(getGeneratorName(table, columns[idx])));
+ result.append(", 0)");
+ }
+ result.append(" FROM RDB$DATABASE");
+ return result.toString();
+ }
+ }
+
+ public String fixLastIdentityValues(Table table) {
+ Column[] columns = table.getAutoIncrementColumns();
+
+ if (columns.length == 0) {
+ return null;
+ } else {
+ StringBuffer result = new StringBuffer();
+
+ result.append("SELECT ");
+ for (int idx = 0; idx < columns.length; idx++) {
+ result.append("GEN_ID(");
+ result.append(getDelimitedIdentifier(getGeneratorName(table, columns[idx])));
+ result.append(", (SELECT MAX(").append(columns[idx].getName()).append(")+1 FROM ");
+ result.append(table.getName()).append("))");
+ }
+ result.append(" FROM RDB$DATABASE");
+ return result.toString();
+ }
+ }
+
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // TODO: Dropping of primary keys is currently not supported because we
+ // cannot
+ // determine the pk constraint names and drop them in one go
+ // (We could used a stored procedure if Interbase would allow them to
+ // use DDL)
+ // This will be easier once named primary keys are supported
+ boolean pkColumnAdded = false;
+
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ AddColumnChange addColumnChange = (AddColumnChange) change;
+
+ // TODO: we cannot add columns to the primary key this way
+ // because we would have to drop the pk first and then
+ // add a new one afterwards which is not supported yet
+ if (addColumnChange.getNewColumn().isPrimaryKey()) {
+ pkColumnAdded = true;
+ } else {
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+ } else if (change instanceof RemoveColumnChange) {
+ RemoveColumnChange removeColumnChange = (RemoveColumnChange) change;
+
+ // TODO: we cannot drop primary key columns this way
+ // because we would have to drop the pk first and then
+ // add a new one afterwards which is not supported yet
+ if (!removeColumnChange.getColumn().isPrimaryKey()) {
+ processChange(currentModel, desiredModel, removeColumnChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ // we can only add a primary key if all columns are present in the
+ // table
+ // i.e. none was added during this alteration
+ if ((change instanceof AddPrimaryKeyChange) && !pkColumnAdded) {
+ processChange(currentModel, desiredModel, (AddPrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ /*
+ * Processes the addition of a column to a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ printEndOfStatement(ddl);
+
+ Table curTable = currentModel.findTable(change.getChangedTable().getName(),
+ platform.isDelimitedIdentifierModeOn());
+
+ if (!change.isAtEnd()) {
+ Column prevColumn = change.getPreviousColumn();
+
+ if (prevColumn != null) {
+ // we need the corresponding column object from the current
+ // table
+ prevColumn = curTable.findColumn(prevColumn.getName(),
+ platform.isDelimitedIdentifierModeOn());
+ }
+ // Even though Interbase can only add columns, we can move them
+ // later on
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ALTER ");
+ printIdentifier(getColumnName(change.getNewColumn()), ddl);
+ ddl.append(" POSITION ");
+ // column positions start at 1 in Interbase
+ ddl.append(prevColumn == null ? "1" : String.valueOf(curTable
+ .getColumnIndex(prevColumn) + 1));
+ printEndOfStatement(ddl);
+ }
+ if (change.getNewColumn().isAutoIncrement()) {
+ writeAutoIncrementCreateStmts(curTable, change.getNewColumn(), ddl);
+ }
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ if (change.getColumn().isAutoIncrement()) {
+ writeAutoIncrementDropStmts(change.getChangedTable(), change.getColumn(), ddl);
+ }
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbaseDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbaseDdlReader.java
new file mode 100644
index 0000000000..486ac69f81
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbaseDdlReader.java
@@ -0,0 +1,456 @@
+package org.jumpmind.db.platform.interbase;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.collections.map.ListOrderedMap;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * The Jdbc Model Reader for Interbase.
+ */
+public class InterbaseDdlReader extends AbstractJdbcDdlReader {
+
+ public InterbaseDdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ // Interbase has no catalogs/schemas; '%' wildcards drive the
+ // table/column metadata lookups instead
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ setDefaultTablePattern("%");
+ setDefaultColumnPattern("%");
+ }
+
+ /*
+ * Reads a table and post-processes it with data only available from the
+ * RDB$ system tables: default values, precision/scale, generator-backed
+ * auto-increment flags, and type adjustments.
+ */
+ @Override
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+ Map values) throws SQLException {
+ Table table = super.readTable(connection, metaData, values);
+
+ if (table != null) {
+ determineExtraColumnInfo(connection, table);
+ determineAutoIncrementColumns(connection, table);
+ adjustColumns(table);
+ }
+
+ return table;
+ }
+
+ /*
+ * Reads a column, promoting VARCHARs at or above the platform's
+ * switch-over size to LONGVARCHAR.
+ */
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ Column column = super.readColumn(metaData, values);
+ if (column.getTypeCode() == Types.VARCHAR) {
+ int size = Integer.parseInt(column.getSize());
+ if (size >= InterbasePlatform.SWITCH_TO_LONGVARCHAR_SIZE) {
+ column.setTypeCode(Types.LONGVARCHAR);
+ }
+ }
+ return column;
+ }
+
+ /*
+ * Reads the columns for a table, working around a Jaybird limitation:
+ * with delimited identifiers the driver cannot match the table, so all
+ * columns are fetched and filtered by table name here.
+ */
+ @Override
+ protected Collection readColumns(DatabaseMetaDataWrapper metaData, String tableName)
+ throws SQLException {
+ ResultSet columnData = null;
+
+ try {
+ List columns = new ArrayList();
+
+ if (getPlatform().isDelimitedIdentifierModeOn()) {
+ // Jaybird has a problem when delimited identifiers are used as
+ // it is not able to find the columns for the table
+ // So we have to filter manually below
+ columnData = metaData.getColumns(getDefaultTablePattern(),
+ getDefaultColumnPattern());
+
+ while (columnData.next()) {
+ Map values = readColumns(columnData, getColumnsForColumn());
+
+ if (tableName.equals(values.get("TABLE_NAME"))) {
+ columns.add(readColumn(metaData, values));
+ }
+ }
+ } else {
+ columnData = metaData.getColumns(tableName, getDefaultColumnPattern());
+
+ while (columnData.next()) {
+ Map values = readColumns(columnData, getColumnsForColumn());
+
+ columns.add(readColumn(metaData, values));
+ }
+ }
+
+ return columns;
+ } finally {
+ if (columnData != null) {
+ columnData.close();
+ }
+ }
+ }
+
+ /*
+ * Helper method that determines extra column info from the system tables:
+ * default value, precision, scale.
+ *
+ * @param table The table
+ */
+ protected void determineExtraColumnInfo(Connection connection, Table table) throws SQLException {
+ StringBuffer query = new StringBuffer();
+
+ query.append("SELECT a.RDB$FIELD_NAME, a.RDB$DEFAULT_SOURCE, b.RDB$FIELD_PRECISION, b.RDB$FIELD_SCALE,");
+ query.append(" b.RDB$FIELD_TYPE, b.RDB$FIELD_SUB_TYPE FROM RDB$RELATION_FIELDS a, RDB$FIELDS b");
+ query.append(" WHERE a.RDB$RELATION_NAME=? AND a.RDB$FIELD_SOURCE=b.RDB$FIELD_NAME");
+
+ PreparedStatement prepStmt = connection.prepareStatement(query.toString());
+
+ try {
+ // unquoted identifiers are stored upper-case in the system tables
+ prepStmt.setString(1, getPlatform().isDelimitedIdentifierModeOn() ? table.getName()
+ : table.getName().toUpperCase());
+
+ ResultSet rs = prepStmt.executeQuery();
+
+ while (rs.next()) {
+ String columnName = rs.getString(1).trim();
+ Column column = table.findColumn(columnName, getPlatform()
+ .isDelimitedIdentifierModeOn());
+
+ if (column != null) {
+ String defaultValue = rs.getString(2);
+
+ if (!rs.wasNull() && (defaultValue != null)) {
+ defaultValue = defaultValue.trim();
+ // RDB$DEFAULT_SOURCE stores the literal DDL fragment,
+ // so strip the leading "DEFAULT " keyword
+ if (defaultValue.startsWith("DEFAULT ")) {
+ defaultValue = defaultValue.substring("DEFAULT ".length());
+ }
+ column.setDefaultValue(defaultValue);
+ }
+
+ short precision = rs.getShort(3);
+ boolean precisionSpecified = !rs.wasNull();
+ short scale = rs.getShort(4);
+ boolean scaleSpecified = !rs.wasNull();
+
+ if (precisionSpecified) {
+ // for some reason, Interbase stores the negative scale
+ column.setSizeAndScale(precision, scaleSpecified ? -scale : 0);
+ }
+
+ short dbType = rs.getShort(5);
+ short blobSubType = rs.getShort(6);
+
+ // CLOBs are returned by the driver as VARCHAR
+ // (261 appears to be the BLOB field type, sub-type 1 = text
+ // -- confirm against the Interbase system-table docs)
+ if (!rs.wasNull() && (dbType == 261) && (blobSubType == 1)) {
+ column.setTypeCode(Types.CLOB);
+ }
+ }
+ }
+ rs.close();
+ } finally {
+ prepStmt.close();
+ }
+ }
+
+ /*
+ * Helper method that determines the auto increment status using Interbase's
+ * system tables.
+ *
+ * @param table The table
+ */
+ protected void determineAutoIncrementColumns(Connection connection, Table table)
+ throws SQLException {
+ // Since for long table and column names, the generator name will be
+ // shortened
+ // we have to determine for each column whether there is a generator for
+ // it
+ InterbaseBuilder builder = (InterbaseBuilder) getPlatform().getDdlBuilder();
+ Column[] columns = table.getColumns();
+ HashMap names = new HashMap();
+ String name;
+
+ // map each column's expected generator name back to the column
+ for (int idx = 0; idx < columns.length; idx++) {
+ name = builder.getGeneratorName(table, columns[idx]);
+ if (!getPlatform().isDelimitedIdentifierModeOn()) {
+ name = name.toUpperCase();
+ }
+ names.put(name, columns[idx]);
+ }
+
+ Statement stmt = connection.createStatement();
+
+ try {
+ ResultSet rs = stmt.executeQuery("SELECT RDB$GENERATOR_NAME FROM RDB$GENERATORS");
+
+ // a column is auto-increment iff a generator with its derived name exists
+ while (rs.next()) {
+ String generatorName = rs.getString(1).trim();
+ Column column = (Column) names.get(generatorName);
+
+ if (column != null) {
+ column.setAutoIncrement(true);
+ }
+ }
+ rs.close();
+ } finally {
+ stmt.close();
+ }
+ }
+
+ /*
+ * Adjusts the columns in the table by fixing types and default values.
+ *
+ * @param table The table
+ */
+ protected void adjustColumns(Table table) {
+ Column[] columns = table.getColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ if (columns[idx].getTypeCode() == Types.FLOAT) {
+ columns[idx].setTypeCode(Types.REAL);
+ } else if ((columns[idx].getTypeCode() == Types.NUMERIC)
+ || (columns[idx].getTypeCode() == Types.DECIMAL)) {
+ // NUMERIC(18,0) is how BIGINT round-trips through the driver
+ if ((columns[idx].getTypeCode() == Types.NUMERIC)
+ && (columns[idx].getSizeAsInt() == 18) && (columns[idx].getScale() == 0)) {
+ columns[idx].setTypeCode(Types.BIGINT);
+ }
+ } else if (TypeMap.isTextType(columns[idx].getTypeCode())) {
+ // undo the quote escaping applied when the default was written
+ columns[idx].setDefaultValue(unescape(columns[idx].getDefaultValue(), "'", "''"));
+ }
+ }
+ }
+
+ /*
+ * Reads the primary key column names for a table, filtering manually when
+ * delimited identifiers are on (Jaybird cannot match the table then).
+ */
+ @Override
+ protected Collection readPrimaryKeyNames(DatabaseMetaDataWrapper metaData,
+ String tableName) throws SQLException {
+ List pks = new ArrayList();
+ ResultSet pkData = null;
+
+ try {
+ if (getPlatform().isDelimitedIdentifierModeOn()) {
+ // Jaybird has a problem when delimited identifiers are used as
+ // it is not able to find the primary key info for the table
+ // So we have to filter manually below
+ pkData = metaData.getPrimaryKeys(getDefaultTablePattern());
+ while (pkData.next()) {
+ Map values = readColumns(pkData, getColumnsForPK());
+
+ if (tableName.equals(values.get("TABLE_NAME"))) {
+ pks.add(readPrimaryKeyName(metaData, values));
+ }
+ }
+ } else {
+ pkData = metaData.getPrimaryKeys(tableName);
+ while (pkData.next()) {
+ Map values = readColumns(pkData, getColumnsForPK());
+
+ pks.add(readPrimaryKeyName(metaData, values));
+ }
+ }
+ } finally {
+ if (pkData != null) {
+ pkData.close();
+ }
+ }
+ return pks;
+ }
+
+ /*
+ * Reads the foreign keys for a table, with the same manual filtering
+ * workaround for delimited-identifier mode as readPrimaryKeyNames.
+ */
+ @Override
+ protected Collection readForeignKeys(Connection connection, DatabaseMetaDataWrapper metaData,
+ String tableName) throws SQLException {
+ Map fks = new ListOrderedMap();
+ ResultSet fkData = null;
+
+ try {
+ if (getPlatform().isDelimitedIdentifierModeOn()) {
+ // Jaybird has a problem when delimited identifiers are used as
+ // it is not able to find the foreign key info for the table
+ // So we have to filter manually below
+ fkData = metaData.getForeignKeys(getDefaultTablePattern());
+ while (fkData.next()) {
+ Map values = readColumns(fkData, getColumnsForFK());
+
+ if (tableName.equals(values.get("FKTABLE_NAME"))) {
+ readForeignKey(metaData, values, fks);
+ }
+ }
+ } else {
+ fkData = metaData.getForeignKeys(tableName);
+ while (fkData.next()) {
+ Map values = readColumns(fkData, getColumnsForFK());
+
+ readForeignKey(metaData, values, fks);
+ }
+ }
+ } finally {
+ if (fkData != null) {
+ fkData.close();
+ }
+ }
+ return fks.values();
+ }
+
+ /*
+ * An index is an internal PK index if RDB$RELATION_CONSTRAINTS records a
+ * PRIMARY KEY constraint on this table backed by this index.
+ */
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) throws SQLException {
+ InterbaseBuilder builder = (InterbaseBuilder) getPlatform().getDdlBuilder();
+ String tableName = builder.getTableName(table.getName());
+ String indexName = builder.getIndexName(index);
+ StringBuffer query = new StringBuffer();
+
+ query.append("SELECT RDB$CONSTRAINT_NAME FROM RDB$RELATION_CONSTRAINTS where RDB$RELATION_NAME=? AND RDB$CONSTRAINT_TYPE=? AND RDB$INDEX_NAME=?");
+
+ PreparedStatement stmt = connection.prepareStatement(query.toString());
+
+ try {
+ // unquoted identifiers are stored upper-case in the system tables
+ stmt.setString(
+ 1,
+ getPlatform().isDelimitedIdentifierModeOn() ? tableName : tableName
+ .toUpperCase());
+ stmt.setString(2, "PRIMARY KEY");
+ stmt.setString(3, indexName);
+
+ ResultSet resultSet = stmt.executeQuery();
+
+ // any matching row means the index is constraint-backed
+ return resultSet.next();
+ } finally {
+ if (stmt != null) {
+ stmt.close();
+ }
+ }
+ }
+
+ protected boolean isInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index)
+ throws SQLException {
+ InterbaseBuilder builder = (InterbaseBuilder) getPlatform().getDdlBuilder();
+ String tableName = builder.getTableName(table.getName());
+ String indexName = builder.getIndexName(index);
+ String fkName = builder.getForeignKeyName(table, fk);
+ StringBuffer query = new StringBuffer();
+
+ query.append("SELECT RDB$CONSTRAINT_NAME FROM RDB$RELATION_CONSTRAINTS where RDB$RELATION_NAME=? AND RDB$CONSTRAINT_TYPE=? AND RDB$CONSTRAINT_NAME=? AND RDB$INDEX_NAME=?");
+
+ PreparedStatement stmt = connection.prepareStatement(query.toString());
+
+ try {
+ stmt.setString(
+ 1,
+ getPlatform().isDelimitedIdentifierModeOn() ? tableName : tableName
+ .toUpperCase());
+ stmt.setString(2, "FOREIGN KEY");
+ stmt.setString(3, fkName);
+ stmt.setString(4, indexName);
+
+ ResultSet resultSet = stmt.executeQuery();
+
+ return resultSet.next();
+ } finally {
+ if (stmt != null) {
+ stmt.close();
+ }
+ }
+ }
+
@Override
public String determineSchemaOf(Connection connection, String schemaPattern, Table table)
        throws SQLException {
    /*
     * Determines the schema that contains a table matching the given table's
     * name AND column set. Returns the schema name, or null when no table in
     * the searched schemas has all of the given table's columns.
     */
    ResultSet tableData = null;
    ResultSet columnData = null;

    try {
        DatabaseMetaDataWrapper metaData = new DatabaseMetaDataWrapper();

        metaData.setMetaData(connection.getMetaData());
        metaData.setCatalog(getDefaultCatalogPattern());
        // Fall back to the reader's default schema pattern when none given.
        metaData.setSchemaPattern(schemaPattern == null ? getDefaultSchemaPattern()
                : schemaPattern);
        metaData.setTableTypes(getDefaultTableTypes());

        String tablePattern = table.getName();

        if (getPlatform().isDelimitedIdentifierModeOn()) {
            tablePattern = tablePattern.toUpperCase();
        }

        tableData = metaData.getTables(tablePattern);

        boolean found = false;
        String schema = null;

        // Visit candidate tables until one is confirmed to contain every
        // column of the given table.
        while (!found && tableData.next()) {
            Map values = readColumns(tableData, getColumnsForTable());
            String tableName = (String) values.get("TABLE_NAME");

            if ((tableName != null) && (tableName.length() > 0)) {
                schema = (String) values.get("TABLE_SCHEM");
                found = true;

                if (getPlatform().isDelimitedIdentifierModeOn()) {
                    // Jaybird has a problem when delimited identifiers are
                    // used as it is not able to find the columns for the
                    // table. So we have to filter manually below.
                    columnData = metaData.getColumns(getDefaultTablePattern(),
                            getDefaultColumnPattern());
                } else {
                    columnData = metaData.getColumns(tableName, getDefaultColumnPattern());
                }

                // Verify that every reported column exists on the given
                // table; a single miss disqualifies this candidate.
                while (found && columnData.next()) {
                    values = readColumns(columnData, getColumnsForColumn());

                    // Skip rows belonging to other tables (delimited mode
                    // returned columns of all tables).
                    if (getPlatform().isDelimitedIdentifierModeOn()
                            && !tableName.equals(values.get("TABLE_NAME"))) {
                        continue;
                    }

                    if (table.findColumn((String) values.get("COLUMN_NAME"), getPlatform()
                            .isDelimitedIdentifierModeOn()) == null) {
                        found = false;
                    }
                }
                columnData.close();
                columnData = null;
            }
        }
        return found ? schema : null;
    } finally {
        // Close whichever result sets are still open, even on error paths.
        if (columnData != null) {
            columnData.close();
        }
        if (tableData != null) {
            tableData.close();
        }
    }
}
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbasePlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbasePlatform.java
new file mode 100644
index 0000000000..b459c30e77
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/interbase/InterbasePlatform.java
@@ -0,0 +1,115 @@
+package org.jumpmind.db.platform.interbase;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
/*
 * The platform implementation for the Interbase database. Configures the
 * platform info with Interbase identifier limits and JDBC-to-native type
 * mappings, and wires up the Interbase DDL reader and builder.
 */
public class InterbasePlatform extends AbstractJdbcDatabasePlatform {

    /* Database name of this platform. */
    public static final String DATABASENAME = "Interbase";

    /* The interbase jdbc driver. */
    public static final String JDBC_DRIVER = "interbase.interclient.Driver";

    /* The subprotocol used by the interbase driver. */
    public static final String JDBC_SUBPROTOCOL = "interbase";

    /*
     * VARCHAR size used when mapping LONGVARCHAR.
     * NOTE(review): deliberately non-final so it can be tuned externally?
     * Confirm before making it a constant.
     */
    public static int SWITCH_TO_LONGVARCHAR_SIZE = 4096;

    /*
     * Creates a new platform instance.
     */
    public InterbasePlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
        super(dataSource, settings, log);

        info.setMaxIdentifierLength(31);
        info.setCommentPrefix("/*");
        info.setCommentSuffix("*/");
        info.setSystemForeignKeyIndicesAlwaysNonUnique(true);

        // BINARY and VARBINARY are also handled by the
        // InterbaseBuilder.getSqlType method
        info.addNativeTypeMapping(Types.ARRAY, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.BIGINT, "NUMERIC(18,0)");
        // Theoretically we could use (VAR)CHAR CHARACTER SET OCTETS but the
        // JDBC driver is not
        // able to handle that properly (the byte[]/BinaryStream accessors do
        // not work)
        info.addNativeTypeMapping(Types.BINARY, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.BIT, "SMALLINT", Types.SMALLINT);
        info.addNativeTypeMapping(Types.BLOB, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.CLOB, "BLOB SUB_TYPE TEXT");
        info.addNativeTypeMapping(Types.DISTINCT, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.DOUBLE, "DOUBLE PRECISION");
        info.addNativeTypeMapping(Types.FLOAT, "DOUBLE PRECISION", Types.DOUBLE);
        info.addNativeTypeMapping(Types.JAVA_OBJECT, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.LONGVARBINARY, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.LONGVARCHAR, "VARCHAR(" + SWITCH_TO_LONGVARCHAR_SIZE + ")",
                Types.VARCHAR);
        info.addNativeTypeMapping(Types.NULL, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.OTHER, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.REAL, "FLOAT");
        info.addNativeTypeMapping(Types.REF, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.STRUCT, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
        info.addNativeTypeMapping(Types.VARBINARY, "BLOB", Types.LONGVARBINARY);
        info.addNativeTypeMapping("BOOLEAN", "SMALLINT", "SMALLINT");
        info.addNativeTypeMapping("DATALINK", "BLOB", "LONGVARBINARY");

        info.setDefaultSize(Types.CHAR, 254);
        info.setDefaultSize(Types.VARCHAR, 254);
        info.setHasSize(Types.BINARY, false);
        info.setHasSize(Types.VARBINARY, false);

        info.setStoresUpperCaseInCatalog(true);
        info.setNonBlankCharColumnSpacePadded(true);
        info.setBlankCharColumnSpacePadded(true);
        info.setCharColumnSpaceTrimmed(false);
        info.setEmptyStringNulled(false);

        // Interbase SQLCODE for a primary key / unique violation.
        primaryKeyViolationCodes = new int [] {335544665};

        ddlReader = new InterbaseDdlReader(log, this);
        ddlBuilder = new InterbaseBuilder(log, this);
    }

    /* Returns the database name of this platform. */
    public String getName() {
        return DATABASENAME;
    }

    /* No default catalog is used for Interbase. */
    public String getDefaultCatalog() {
        return null;
    }

    /* No default schema is used for Interbase. */
    public String getDefaultSchema() {
        return null;
    }

}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlBuilder.java
new file mode 100644
index 0000000000..81a185ff48
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlBuilder.java
@@ -0,0 +1,643 @@
+package org.jumpmind.db.platform.mssql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.AddForeignKeyChange;
+import org.jumpmind.db.alter.AddIndexChange;
+import org.jumpmind.db.alter.AddPrimaryKeyChange;
+import org.jumpmind.db.alter.ColumnAutoIncrementChange;
+import org.jumpmind.db.alter.ColumnChange;
+import org.jumpmind.db.alter.ColumnDataTypeChange;
+import org.jumpmind.db.alter.ColumnSizeChange;
+import org.jumpmind.db.alter.IModelChange;
+import org.jumpmind.db.alter.PrimaryKeyChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.RemoveForeignKeyChange;
+import org.jumpmind.db.alter.RemoveIndexChange;
+import org.jumpmind.db.alter.RemovePrimaryKeyChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.db.platform.PlatformUtils;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for the Microsoft SQL Server.
+ */
+public class MsSqlBuilder extends AbstractDdlBuilder {
+
+ /* We use a generic date format. */
+ private DateFormat _genericDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+
+ /* We use a generic date format. */
+ private DateFormat _genericTimeFormat = new SimpleDateFormat("HH:mm:ss");
+
/* Creates the builder and registers T-SQL string escaping. */
public MsSqlBuilder(Log log, IDatabasePlatform platform) {
    super(log, platform);
    // Single quotes inside string literals must be doubled in T-SQL.
    addEscapedCharSequence("'", "''");
}
+
@Override
public void createTable(Table table, StringBuilder ddl) {
    // Emit SET quoted_identifier first so delimited identifiers are legal.
    writeQuotationOnStatement(ddl);
    super.createTable(table, ddl);
}
+
@Override
public void dropTable(Table table, StringBuilder ddl) {
    /*
     * Drops the table, first dropping all non-primary-key constraints on it
     * via a T-SQL cursor over sysobjects/sysconstraints (SQL Server will not
     * drop a table that is still referenced by constraints).
     */
    String tableName = getTableName(table.getName());
    // Unique variable names so several generated scripts can coexist.
    String tableNameVar = "tn" + createUniqueIdentifier();
    String constraintNameVar = "cn" + createUniqueIdentifier();

    writeQuotationOnStatement(ddl);
    // Guard: only run when the user table actually exists.
    ddl.append("IF EXISTS (SELECT 1 FROM sysobjects WHERE type = 'U' AND name = ");
    printAlwaysSingleQuotedIdentifier(tableName, ddl);
    println(")", ddl);
    println("BEGIN", ddl);
    println(" DECLARE @" + tableNameVar + " nvarchar(256), @" + constraintNameVar
            + " nvarchar(256)", ddl);
    println(" DECLARE refcursor CURSOR FOR", ddl);
    println(" SELECT object_name(objs.parent_obj) tablename, objs.name constraintname", ddl);
    println(" FROM sysobjects objs JOIN sysconstraints cons ON objs.id = cons.constid", ddl);
    // 'PK' constraints are excluded; they go away with the table itself.
    ddl.append(" WHERE objs.xtype != 'PK' AND object_name(objs.parent_obj) = ");
    printAlwaysSingleQuotedIdentifier(tableName, ddl);
    println(" OPEN refcursor", ddl);
    println(" FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar,
            ddl);
    println(" WHILE @@FETCH_STATUS = 0", ddl);
    println(" BEGIN", ddl);
    // Constraint names are only known at run time, hence dynamic SQL.
    println(" EXEC ('ALTER TABLE '+@" + tableNameVar + "+' DROP CONSTRAINT '+@"
            + constraintNameVar + ")", ddl);
    println(" FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar,
            ddl);
    println(" END", ddl);
    println(" CLOSE refcursor", ddl);
    println(" DEALLOCATE refcursor", ddl);
    ddl.append(" DROP TABLE ");
    printlnIdentifier(tableName, ddl);
    ddl.append("END");
    printEndOfStatement(ddl);
}
+
@Override
public void dropExternalForeignKeys(Table table, StringBuilder ddl) {
    // Emit SET quoted_identifier first so delimited identifiers are legal.
    writeQuotationOnStatement(ddl);
    super.dropExternalForeignKeys(table, ddl);
}
+
/* Returns the generic yyyy-MM-dd format used for date literals. */
@Override
protected DateFormat getValueDateFormat() {
    return _genericDateFormat;
}

/* Returns the generic HH:mm:ss format used for time literals. */
@Override
protected DateFormat getValueTimeFormat() {
    return _genericTimeFormat;
}
+
+ @Override
+ protected String getValueAsString(Column column, Object value) {
+ if (value == null) {
+ return "NULL";
+ }
+
+ StringBuffer result = new StringBuffer();
+
+ switch (column.getTypeCode()) {
+ case Types.REAL:
+ case Types.NUMERIC:
+ case Types.FLOAT:
+ case Types.DOUBLE:
+ case Types.DECIMAL:
+ // SQL Server does not want quotes around the value
+ if (!(value instanceof String) && (getValueNumberFormat() != null)) {
+ result.append(getValueNumberFormat().format(value));
+ } else {
+ result.append(value.toString());
+ }
+ break;
+ case Types.DATE:
+ result.append("CAST(");
+ result.append(platform.getPlatformInfo().getValueQuoteToken());
+ result.append(value instanceof String ? (String) value : getValueDateFormat().format(
+ value));
+ result.append(platform.getPlatformInfo().getValueQuoteToken());
+ result.append(" AS datetime)");
+ break;
+ case Types.TIME:
+ result.append("CAST(");
+ result.append(platform.getPlatformInfo().getValueQuoteToken());
+ result.append(value instanceof String ? (String) value : getValueTimeFormat().format(
+ value));
+ result.append(platform.getPlatformInfo().getValueQuoteToken());
+ result.append(" AS datetime)");
+ break;
+ case Types.TIMESTAMP:
+ result.append("CAST(");
+ result.append(platform.getPlatformInfo().getValueQuoteToken());
+ result.append(value.toString());
+ result.append(platform.getPlatformInfo().getValueQuoteToken());
+ result.append(" AS datetime)");
+ break;
+ }
+ return super.getValueAsString(column, value);
+ }
+
@Override
protected String getNativeDefaultValue(Column column) {
    // Sql Server wants BIT default values as 0 or 1, so boolean-typed
    // defaults are converted through SMALLINT first.
    if ((column.getTypeCode() == Types.BIT)
            || (PlatformUtils.supportsJava14JdbcTypes() && (column.getTypeCode() == PlatformUtils
                    .determineBooleanTypeCode()))) {
        return getDefaultValueHelper().convert(column.getDefaultValue(), column.getTypeCode(),
                Types.SMALLINT).toString();
    } else {
        return super.getNativeDefaultValue(column);
    }
}
+
@Override
protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
    // SQL Server auto-increment = IDENTITY with seed 1 and increment 1.
    ddl.append("IDENTITY (1,1) ");
}
+
@Override
public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
    // Uses the legacy DROP INDEX <table>.<index> form.
    ddl.append("DROP INDEX ");
    printIdentifier(getTableName(table.getName()), ddl);
    ddl.append(".");
    printIdentifier(getIndexName(index), ddl);
    printEndOfStatement(ddl);
}
+
@Override
protected void writeExternalForeignKeyDropStmt(Table table, ForeignKey foreignKey,
        StringBuilder ddl) {
    String constraintName = getForeignKeyName(table, foreignKey);

    // Guard with IF EXISTS so the script is safe when the constraint is
    // already gone.
    ddl.append("IF EXISTS (SELECT 1 FROM sysobjects WHERE type = 'F' AND name = ");
    printAlwaysSingleQuotedIdentifier(constraintName, ddl);
    println(")", ddl);
    printIndent(ddl);
    ddl.append("ALTER TABLE ");
    printIdentifier(getTableName(table.getName()), ddl);
    ddl.append(" DROP CONSTRAINT ");
    printIdentifier(constraintName, ddl);
    printEndOfStatement(ddl);
}
+
+ /*
+ * Returns the statement that turns on the ability to write delimited
+ * identifiers.
+ *
+ * @return The quotation-on statement
+ */
+ private String getQuotationOnStatement() {
+ if (platform.isDelimitedIdentifierModeOn()) {
+ return "SET quoted_identifier on" + platform.getPlatformInfo().getSqlCommandDelimiter()
+ + "\n";
+ } else {
+ return "";
+ }
+ }
+
+ /*
+ * Writes the statement that turns on the ability to write delimited
+ * identifiers.
+ */
+ private void writeQuotationOnStatement(StringBuilder ddl) {
+ ddl.append(getQuotationOnStatement());
+ }
+
@Override
public String getSelectLastIdentityValues(Table table) {
    // NOTE(review): @@IDENTITY is connection-wide and can be affected by
    // triggers; SCOPE_IDENTITY() may be more precise — confirm before changing.
    return "SELECT @@IDENTITY";
}
+
+ /*
+ * Returns the SQL to enable identity override mode.
+ *
+ * @param table The table to enable the mode for
+ *
+ * @return The SQL
+ */
+ protected String getEnableIdentityOverrideSql(Table table) {
+ StringBuffer result = new StringBuffer();
+
+ result.append(getQuotationOnStatement());
+ result.append("SET IDENTITY_INSERT ");
+ result.append(getDelimitedIdentifier(getTableName(table.getName())));
+ result.append(" ON");
+ result.append(platform.getPlatformInfo().getSqlCommandDelimiter());
+
+ return result.toString();
+ }
+
+ /*
+ * Returns the SQL to disable identity override mode.
+ *
+ * @param table The table to disable the mode for
+ *
+ * @return The SQL
+ */
+ protected String getDisableIdentityOverrideSql(Table table) {
+ StringBuffer result = new StringBuffer();
+
+ result.append(getQuotationOnStatement());
+ result.append("SET IDENTITY_INSERT ");
+ result.append(getDelimitedIdentifier(getTableName(table.getName())));
+ result.append(" OFF");
+ result.append(platform.getPlatformInfo().getSqlCommandDelimiter());
+
+ return result.toString();
+ }
+
/* Prefixes the generic DELETE with the quotation-on statement. */
@Override
public String getDeleteSql(Table table, Map pkValues, boolean genPlaceholders) {
    return getQuotationOnStatement() + super.getDeleteSql(table, pkValues, genPlaceholders);
}

/* Prefixes the generic INSERT with the quotation-on statement. */
@Override
public String getInsertSql(Table table, Map columnValues,
        boolean genPlaceholders) {
    return getQuotationOnStatement() + super.getInsertSql(table, columnValues, genPlaceholders);
}

/* Prefixes the generic UPDATE with the quotation-on statement. */
@Override
public String getUpdateSql(Table table, Map columnValues,
        boolean genPlaceholders) {
    return getQuotationOnStatement() + super.getUpdateSql(table, columnValues, genPlaceholders);
}
+
+ /*
+ * Prints the given identifier with enforced single quotes around it
+ * regardless of whether delimited identifiers are turned on or not.
+ *
+ * @param identifier The identifier
+ */
+ private void printAlwaysSingleQuotedIdentifier(String identifier, StringBuilder ddl) {
+ ddl.append("'");
+ ddl.append(identifier);
+ ddl.append("'");
+ }
+
@Override
public void writeCopyDataStatement(Table sourceTable, Table targetTable, StringBuilder ddl) {
    // Sql Server per default does not allow us to insert values explicitly
    // into identity columns. However, we can change this behavior
    boolean hasIdentityColumns = targetTable.getAutoIncrementColumns().length > 0;

    if (hasIdentityColumns) {
        ddl.append("SET IDENTITY_INSERT ");
        printIdentifier(getTableName(targetTable.getName()), ddl);
        ddl.append(" ON");
        printEndOfStatement(ddl);
    }
    super.writeCopyDataStatement(sourceTable, targetTable, ddl);
    // We have to turn it off ASAP because it can be on only for one table
    // per session
    if (hasIdentityColumns) {
        ddl.append("SET IDENTITY_INSERT ");
        printIdentifier(getTableName(targetTable.getName()), ddl);
        ddl.append(" OFF");
        printEndOfStatement(ddl);
    }
}
+
@Override
protected void processChanges(Database currentModel, Database desiredModel,
        List changes, StringBuilder ddl) {
    if (!changes.isEmpty()) {
        writeQuotationOnStatement(ddl);
    }
    // For column data type and size changes, we need to drop and then
    // re-create indexes and foreign keys using the column, as well as any
    // primary keys containing these columns.
    // However, if the index/foreign key/primary key is already slated for
    // removal or change, then we don't want to generate change duplication
    HashSet removedIndexes = new HashSet();
    HashSet removedForeignKeys = new HashSet();
    HashSet removedPKs = new HashSet();

    // First pass: record every index/FK/PK that is already being removed.
    for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
        Object change = changeIt.next();

        if (change instanceof RemoveIndexChange) {
            removedIndexes.add(((RemoveIndexChange) change).getIndex());
        } else if (change instanceof RemoveForeignKeyChange) {
            removedForeignKeys.add(((RemoveForeignKeyChange) change).getForeignKey());
        } else if (change instanceof RemovePrimaryKeyChange) {
            removedPKs.add(((RemovePrimaryKeyChange) change).getChangedTable());
        }
    }

    ArrayList additionalChanges = new ArrayList();

    // Second pass: for each type/size change, schedule drop + re-create of
    // every dependent PK, index, and foreign key not already handled.
    for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
        Object change = changeIt.next();

        if ((change instanceof ColumnDataTypeChange) || (change instanceof ColumnSizeChange)) {
            Column column = ((ColumnChange) change).getChangedColumn();
            Table table = ((ColumnChange) change).getChangedTable();

            if (column.isPrimaryKey() && !removedPKs.contains(table)) {
                Column[] pk = table.getPrimaryKeyColumns();

                additionalChanges.add(new RemovePrimaryKeyChange(table, pk));
                additionalChanges.add(new AddPrimaryKeyChange(table, pk));
                removedPKs.add(table);
            }
            for (int idx = 0; idx < table.getIndexCount(); idx++) {
                IIndex index = table.getIndex(idx);

                if (index.hasColumn(column) && !removedIndexes.contains(index)) {
                    additionalChanges.add(new RemoveIndexChange(table, index));
                    additionalChanges.add(new AddIndexChange(table, index));
                    removedIndexes.add(index);
                }
            }
            // Foreign keys in ANY table may reference the changed column.
            for (int tableIdx = 0; tableIdx < currentModel.getTableCount(); tableIdx++) {
                Table curTable = currentModel.getTable(tableIdx);

                for (int fkIdx = 0; fkIdx < curTable.getForeignKeyCount(); fkIdx++) {
                    ForeignKey curFk = curTable.getForeignKey(fkIdx);

                    if ((curFk.hasLocalColumn(column) || curFk.hasForeignColumn(column))
                            && !removedForeignKeys.contains(curFk)) {
                        additionalChanges.add(new RemoveForeignKeyChange(curTable, curFk));
                        additionalChanges.add(new AddForeignKeyChange(curTable, curFk));
                        removedForeignKeys.add(curFk);
                    }
                }
            }
        }
    }
    changes.addAll(additionalChanges);
    super.processChanges(currentModel, desiredModel, changes, ddl);
}
+
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // First we drop primary keys as necessary
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof RemovePrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (RemovePrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof PrimaryKeyChange) {
+ PrimaryKeyChange pkChange = (PrimaryKeyChange) change;
+ RemovePrimaryKeyChange removePkChange = new RemovePrimaryKeyChange(
+ pkChange.getChangedTable(), pkChange.getOldPrimaryKeyColumns());
+
+ processChange(currentModel, desiredModel, removePkChange, ddl);
+ }
+ }
+
+ ArrayList columnChanges = new ArrayList();
+
+ // Next we add/remove columns
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = (TableChange) changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ AddColumnChange addColumnChange = (AddColumnChange) change;
+
+ // Sql Server can only add not insert columns
+ if (addColumnChange.isAtEnd()) {
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+ } else if (change instanceof RemoveColumnChange) {
+ processChange(currentModel, desiredModel, (RemoveColumnChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof ColumnAutoIncrementChange) {
+ // Sql Server has no way of adding or removing an IDENTITY
+ // constraint
+ // Thus we have to rebuild the table anyway and can ignore all
+ // the other
+ // column changes
+ columnChanges = null;
+ } else if ((change instanceof ColumnChange) && (columnChanges != null)) {
+ // we gather all changed columns because we can use the ALTER
+ // TABLE ALTER COLUMN
+ // statement for them
+ columnChanges.add(change);
+ }
+ }
+ if (columnChanges != null) {
+ HashSet processedColumns = new HashSet();
+
+ for (Iterator changeIt = columnChanges.iterator(); changeIt.hasNext();) {
+ ColumnChange change = (ColumnChange) changeIt.next();
+ Column sourceColumn = change.getChangedColumn();
+ Column targetColumn = targetTable.findColumn(sourceColumn.getName(),
+ platform.isDelimitedIdentifierModeOn());
+
+ if (!processedColumns.contains(targetColumn)) {
+ processColumnChange(sourceTable, targetTable, sourceColumn, targetColumn,
+ (change instanceof ColumnDataTypeChange)
+ || (change instanceof ColumnSizeChange), ddl);
+ processedColumns.add(targetColumn);
+ }
+ changes.remove(change);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+ }
+ // Finally we add primary keys
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = (TableChange) changeIt.next();
+
+ if (change instanceof AddPrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (AddPrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof PrimaryKeyChange) {
+ PrimaryKeyChange pkChange = (PrimaryKeyChange) change;
+ AddPrimaryKeyChange addPkChange = new AddPrimaryKeyChange(
+ pkChange.getChangedTable(), pkChange.getNewPrimaryKeyColumns());
+
+ processChange(currentModel, desiredModel, addPkChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
/*
 * Processes the addition of a column to a table (SQL Server can only
 * append columns, never insert them at a position).
 */
protected void processChange(Database currentModel, Database desiredModel,
        AddColumnChange change, StringBuilder ddl) {
    ddl.append("ALTER TABLE ");
    printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
    printIndent(ddl);
    ddl.append("ADD ");
    writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
    printEndOfStatement(ddl);
    // Keep the in-memory model in sync with the emitted DDL.
    change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
}
+
/*
 * Processes the removal of a column from a table.
 */
protected void processChange(Database currentModel, Database desiredModel,
        RemoveColumnChange change, StringBuilder ddl) {
    ddl.append("ALTER TABLE ");
    printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
    printIndent(ddl);
    ddl.append("DROP COLUMN ");
    printIdentifier(getColumnName(change.getColumn()), ddl);
    printEndOfStatement(ddl);
    // Keep the in-memory model in sync with the emitted DDL.
    change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
}
+
/*
 * Processes the removal of a primary key from a table. The PK constraint
 * name is not known, so a T-SQL cursor finds it in
 * sysobjects/sysconstraints and drops it via dynamic SQL.
 */
protected void processChange(Database currentModel, Database desiredModel,
        RemovePrimaryKeyChange change, StringBuilder ddl) {
    // TODO: this would be easier when named primary keys are supported
    // because then we can use ALTER TABLE DROP
    String tableName = getTableName(change.getChangedTable().getName());
    // Unique variable names so several generated scripts can coexist.
    String tableNameVar = "tn" + createUniqueIdentifier();
    String constraintNameVar = "cn" + createUniqueIdentifier();

    println("BEGIN", ddl);
    println(" DECLARE @" + tableNameVar + " nvarchar(256), @" + constraintNameVar
            + " nvarchar(256)", ddl);
    println(" DECLARE refcursor CURSOR FOR", ddl);
    println(" SELECT object_name(objs.parent_obj) tablename, objs.name constraintname", ddl);
    println(" FROM sysobjects objs JOIN sysconstraints cons ON objs.id = cons.constid", ddl);
    ddl.append(" WHERE objs.xtype = 'PK' AND object_name(objs.parent_obj) = ");
    printAlwaysSingleQuotedIdentifier(tableName, ddl);
    println(" OPEN refcursor", ddl);
    println(" FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar,
            ddl);
    println(" WHILE @@FETCH_STATUS = 0", ddl);
    println(" BEGIN", ddl);
    println(" EXEC ('ALTER TABLE '+@" + tableNameVar + "+' DROP CONSTRAINT '+@"
            + constraintNameVar + ")", ddl);
    println(" FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar,
            ddl);
    println(" END", ddl);
    println(" CLOSE refcursor", ddl);
    println(" DEALLOCATE refcursor", ddl);
    ddl.append("END");
    printEndOfStatement(ddl);
    // Keep the in-memory model in sync with the emitted DDL.
    change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
}
+
/*
 * Processes a change to a column: drops any existing DEFAULT constraint,
 * emits ALTER TABLE ALTER COLUMN, and re-adds the default as a named
 * constraint afterwards.
 */
protected void processColumnChange(Table sourceTable, Table targetTable, Column sourceColumn,
        Column targetColumn, boolean typeChange, StringBuilder ddl) {
    boolean hasDefault = sourceColumn.getParsedDefaultValue() != null;
    boolean shallHaveDefault = targetColumn.getParsedDefaultValue() != null;
    String newDefault = targetColumn.getDefaultValue();

    // Sql Server does not like it if there is a default spec in the ALTER
    // TABLE ALTER COLUMN
    // statement; thus we have to change the default manually
    if (newDefault != null) {
        targetColumn.setDefaultValue(null);
    }
    if (hasDefault) {
        // we're dropping the old default; the constraint name is unknown,
        // so a cursor over sysobjects/sysconstraints finds it (xtype 'D'
        // marks default constraints).
        String tableName = getTableName(sourceTable.getName());
        String columnName = getColumnName(sourceColumn);
        String tableNameVar = "tn" + createUniqueIdentifier();
        String constraintNameVar = "cn" + createUniqueIdentifier();

        println("BEGIN", ddl);
        println(" DECLARE @" + tableNameVar + " nvarchar(256), @" + constraintNameVar
                + " nvarchar(256)", ddl);
        println(" DECLARE refcursor CURSOR FOR", ddl);
        println(" SELECT object_name(objs.parent_obj) tablename, objs.name constraintname",
                ddl);
        println(" FROM sysobjects objs JOIN sysconstraints cons ON objs.id = cons.constid",
                ddl);
        println(" WHERE objs.xtype = 'D' AND", ddl);
        ddl.append(" cons.colid = (SELECT colid FROM syscolumns WHERE id = object_id(");
        printAlwaysSingleQuotedIdentifier(tableName, ddl);
        ddl.append(") AND name = ");
        printAlwaysSingleQuotedIdentifier(columnName, ddl);
        println(") AND", ddl);
        ddl.append(" object_name(objs.parent_obj) = ");
        printAlwaysSingleQuotedIdentifier(tableName, ddl);
        println(" OPEN refcursor", ddl);
        println(" FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar,
                ddl);
        println(" WHILE @@FETCH_STATUS = 0", ddl);
        println(" BEGIN", ddl);
        println(" EXEC ('ALTER TABLE '+@" + tableNameVar + "+' DROP CONSTRAINT '+@"
                + constraintNameVar + ")", ddl);
        println(" FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @"
                + constraintNameVar, ddl);
        println(" END", ddl);
        println(" CLOSE refcursor", ddl);
        println(" DEALLOCATE refcursor", ddl);
        ddl.append("END");
        printEndOfStatement(ddl);
    }

    // The actual column alteration, with any default already stripped off.
    ddl.append("ALTER TABLE ");
    printlnIdentifier(getTableName(sourceTable.getName()), ddl);
    printIndent(ddl);
    ddl.append("ALTER COLUMN ");
    writeColumn(sourceTable, targetColumn, ddl);
    printEndOfStatement(ddl);

    if (shallHaveDefault) {
        targetColumn.setDefaultValue(newDefault);

        // if the column shall have a default, then we have to add it as a
        // constraint
        ddl.append("ALTER TABLE ");
        printlnIdentifier(getTableName(sourceTable.getName()), ddl);
        printIndent(ddl);
        ddl.append("ADD CONSTRAINT ");
        printIdentifier(getConstraintName("DF", sourceTable, sourceColumn.getName(), null), ddl);
        writeColumnDefaultValueStmt(sourceTable, targetColumn, ddl);
        ddl.append(" FOR ");
        printIdentifier(getColumnName(sourceColumn), ddl);
        printEndOfStatement(ddl);
    }
}
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlDdlReader.java
new file mode 100644
index 0000000000..fe3191a1d2
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlDdlReader.java
@@ -0,0 +1,208 @@
+package org.jumpmind.db.platform.mssql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.Map;
+
+import org.apache.oro.text.regex.MalformedPatternException;
+import org.apache.oro.text.regex.Pattern;
+import org.apache.oro.text.regex.PatternCompiler;
+import org.apache.oro.text.regex.PatternMatcher;
+import org.apache.oro.text.regex.Perl5Compiler;
+import org.apache.oro.text.regex.Perl5Matcher;
+import org.jumpmind.db.DdlException;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a Microsoft Sql Server database.
+ */
+public class MsSqlDdlReader extends AbstractJdbcDdlReader {
+
+    /* Known system tables that Sql Server creates (e.g. automatic maintenance). */
+    private static final String[] KNOWN_SYSTEM_TABLES = { "dtproperties" };
+
+    /* Matches a quoted ISO date default value, e.g. '2001-12-31'. */
+    private Pattern _isoDatePattern;
+
+    /* Matches a quoted ISO time default value, e.g. '13:59:59'. */
+    private Pattern _isoTimePattern;
+
+    public MsSqlDdlReader(Log log, IDatabasePlatform platform) {
+        super(log, platform);
+        setDefaultCatalogPattern(null);
+        setDefaultSchemaPattern(null);
+        setDefaultTablePattern("%");
+
+        PatternCompiler compiler = new Perl5Compiler();
+
+        try {
+            _isoDatePattern = compiler.compile("'(\\d{4}\\-\\d{2}\\-\\d{2})'");
+            _isoTimePattern = compiler.compile("'(\\d{2}:\\d{2}:\\d{2})'");
+        } catch (MalformedPatternException ex) {
+            // cannot happen in practice: the patterns above are constants
+            throw new DdlException(ex);
+        }
+    }
+
+    /*
+     * Reads a single table, skipping known Sql Server system tables and
+     * removing unique indices that merely back the primary key.
+     */
+    @Override
+    protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData, Map values)
+            throws SQLException {
+        String tableName = (String) values.get("TABLE_NAME");
+
+        for (int idx = 0; idx < KNOWN_SYSTEM_TABLES.length; idx++) {
+            if (KNOWN_SYSTEM_TABLES[idx].equals(tableName)) {
+                return null;
+            }
+        }
+
+        Table table = super.readTable(connection, metaData, values);
+
+        if (table != null) {
+            // Sql Server does not return the auto-increment status via the
+            // database metadata
+            determineAutoIncrementFromResultSetMetaData(connection, table, table.getColumns());
+
+            // TODO: Replace this manual filtering using named pks once they are
+            // available; this is then probably of interest to every platform
+            for (int idx = 0; idx < table.getIndexCount();) {
+                IIndex index = table.getIndex(idx);
+
+                if (index.isUnique() && existsPKWithName(metaData, table, index.getName())) {
+                    table.removeIndex(idx);
+                } else {
+                    idx++;
+                }
+            }
+        }
+        return table;
+    }
+
+    @Override
+    protected boolean isInternalPrimaryKeyIndex(Connection connection,
+            DatabaseMetaDataWrapper metaData, Table table, IIndex index) {
+        // Sql Server generates an index "PK__[table name]__[hex number]"
+        String pkIndexPrefix = "PK__" + table.getName() + "__";
+
+        return index.getName().toUpperCase().startsWith(pkIndexPrefix.toUpperCase());
+    }
+
+    /*
+     * Determines whether there is a pk for the table with the given name.
+     *
+     * @param metaData The database metadata
+     *
+     * @param table The table
+     *
+     * @param name The pk name
+     *
+     * @return true if there is such a pk
+     */
+    private boolean existsPKWithName(DatabaseMetaDataWrapper metaData, Table table, String name) {
+        try {
+            ResultSet pks = metaData.getPrimaryKeys(table.getName());
+            try {
+                while (pks.next()) {
+                    if (name.equals(pks.getString("PK_NAME"))) {
+                        return true;
+                    }
+                }
+                return false;
+            } finally {
+                // release the cursor even if iteration throws
+                pks.close();
+            }
+        } catch (SQLException ex) {
+            throw new DdlException(ex);
+        }
+    }
+
+    /*
+     * Maps Sql Server TEXT types to CLOB since they hold character data.
+     * NOTE(review): assumes the driver reports the type name in upper case
+     * ("TEXT") — confirm against the jdbc driver in use.
+     */
+    @Override
+    protected Integer overrideJdbcTypeForColumn(Map values) {
+        String typeName = (String) values.get("TYPE_NAME");
+        if (typeName != null && typeName.startsWith("TEXT")) {
+            return Types.CLOB;
+        } else {
+            return super.overrideJdbcTypeForColumn(values);
+        }
+    }
+
+    /*
+     * Reads a column and normalizes the default value as returned by
+     * Sql Server: strips wrapping parentheses, migrates DATE/TIME defaults
+     * into TIMESTAMP form, drops the trailing dot of scale-0 DECIMAL
+     * defaults, unescapes quoted text defaults, and maps DECIMAL(19,0)
+     * columns back to BIGINT.
+     */
+    @Override
+    protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values) throws SQLException {
+        Column column = super.readColumn(metaData, values);
+        String defaultValue = column.getDefaultValue();
+
+        // Sql Server tends to surround the returned default value with one or
+        // two sets of parentheses
+        if (defaultValue != null) {
+            while (defaultValue.startsWith("(") && defaultValue.endsWith(")")) {
+                defaultValue = defaultValue.substring(1, defaultValue.length() - 1);
+            }
+
+            if (column.getTypeCode() == Types.TIMESTAMP) {
+                // Sql Server maintains the default values for DATE/TIME jdbc
+                // types, so we have to migrate the default value to TIMESTAMP
+                PatternMatcher matcher = new Perl5Matcher();
+                Timestamp timestamp = null;
+
+                if (matcher.matches(defaultValue, _isoDatePattern)) {
+                    timestamp = new Timestamp(Date.valueOf(matcher.getMatch().group(1)).getTime());
+                } else if (matcher.matches(defaultValue, _isoTimePattern)) {
+                    timestamp = new Timestamp(Time.valueOf(matcher.getMatch().group(1)).getTime());
+                }
+                if (timestamp != null) {
+                    defaultValue = timestamp.toString();
+                }
+            } else if (column.getTypeCode() == Types.DECIMAL) {
+                // For some reason, Sql Server 2005 always returns DECIMAL
+                // default values with a dot even if the scale is 0, so we
+                // remove the dot
+                if ((column.getScale() == 0) && defaultValue.endsWith(".")) {
+                    defaultValue = defaultValue.substring(0, defaultValue.length() - 1);
+                }
+            } else if (TypeMap.isTextType(column.getTypeCode())) {
+                defaultValue = unescape(defaultValue, "'", "''");
+            }
+
+            column.setDefaultValue(defaultValue);
+        }
+        // BIGINT is created as DECIMAL(19,0) (see the platform's native type
+        // mapping), so map it back when reading the model
+        if ((column.getTypeCode() == Types.DECIMAL) && (column.getSizeAsInt() == 19)
+                && (column.getScale() == 0)) {
+            column.setTypeCode(Types.BIGINT);
+        }
+
+        return column;
+    }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlJdbcSqlTemplate.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlJdbcSqlTemplate.java
new file mode 100644
index 0000000000..a740fe1270
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlJdbcSqlTemplate.java
@@ -0,0 +1,20 @@
+package org.jumpmind.db.platform.mssql;
+
+import javax.sql.DataSource;
+
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.db.sql.ISqlTransaction;
+import org.jumpmind.db.sql.jdbc.JdbcSqlTemplate;
+
+public class MsSqlJdbcSqlTemplate extends JdbcSqlTemplate {
+
+    /*
+     * Builds a Sql Server specific template over the given data source.
+     * NOTE(review): a null log is passed to the superclass — confirm that
+     * JdbcSqlTemplate tolerates a null logger.
+     */
+    public MsSqlJdbcSqlTemplate(DataSource dataSource, DatabasePlatformSettings settings) {
+        super(dataSource, settings, null);
+    }
+
+    /*
+     * Hands out a Sql Server aware transaction that can toggle the
+     * IDENTITY_INSERT option around inserts into auto-increment columns.
+     */
+    @Override
+    public ISqlTransaction startSqlTransaction() {
+        return new MsSqlJdbcSqlTransaction(this);
+    }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlJdbcSqlTransaction.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlJdbcSqlTransaction.java
new file mode 100644
index 0000000000..e99d08f925
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlJdbcSqlTransaction.java
@@ -0,0 +1,26 @@
+package org.jumpmind.db.platform.mssql;
+
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.sql.jdbc.JdbcSqlTemplate;
+import org.jumpmind.db.sql.jdbc.JdbcSqlTransaction;
+
+public class MsSqlJdbcSqlTransaction extends JdbcSqlTransaction {
+
+    public MsSqlJdbcSqlTransaction(JdbcSqlTemplate sqltemplate) {
+        super(sqltemplate);
+    }
+
+    /*
+     * Turns Sql Server's IDENTITY_INSERT option on or off for the given
+     * table so that explicit values may be inserted into its auto-increment
+     * columns. A null table or a table without auto-increment columns is a
+     * no-op.
+     */
+    @Override
+    public void allowInsertIntoAutoIncrementColumns(boolean allow, Table table) {
+        if (table == null || table.getAutoIncrementColumns().length == 0) {
+            return;
+        }
+        if (allow) {
+            execute(String.format("SET IDENTITY_INSERT %s ON",
+                    table.getFullyQualifiedTableName()));
+        } else {
+            execute(String.format("SET IDENTITY_INSERT %s OFF",
+                    table.getFullyQualifiedTableName()));
+        }
+    }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlPlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlPlatform.java
new file mode 100644
index 0000000000..d9fca75ada
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mssql/MsSqlPlatform.java
@@ -0,0 +1,121 @@
+package org.jumpmind.db.platform.mssql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for the Microsoft SQL Server database.
+ */
+public class MsSqlPlatform extends AbstractJdbcDatabasePlatform {
+
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "MsSql";
+
+ /* The standard SQLServer jdbc driver. */
+ public static final String JDBC_DRIVER = "net.sourceforge.jtds.jdbc.Driver";
+
+ /* The subprotocol used by the standard SQL Server driver. */
+ public static final String JDBC_SUBPROTOCOL = "jtds";
+
+ /*
+ * Creates a new platform instance.
+ */
+ public MsSqlPlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.setMaxIdentifierLength(128);
+
+ // Native type mappings for jdbc types that Sql Server has no direct
+ // equivalent for
+ info.addNativeTypeMapping(Types.ARRAY, "IMAGE", Types.LONGVARBINARY);
+ // BIGINT will be mapped back to BIGINT by the model reader
+ info.addNativeTypeMapping(Types.BIGINT, "DECIMAL(19,0)");
+ info.addNativeTypeMapping(Types.BLOB, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.CLOB, "TEXT", Types.LONGVARCHAR);
+ info.addNativeTypeMapping(Types.DATE, "DATETIME", Types.TIMESTAMP);
+ info.addNativeTypeMapping(Types.DISTINCT, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.DOUBLE, "FLOAT", Types.FLOAT);
+ info.addNativeTypeMapping(Types.INTEGER, "INT");
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.LONGVARBINARY, "IMAGE");
+ info.addNativeTypeMapping(Types.LONGVARCHAR, "TEXT");
+ info.addNativeTypeMapping(Types.NULL, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.OTHER, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.REF, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.STRUCT, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.TIME, "DATETIME", Types.TIMESTAMP);
+ info.addNativeTypeMapping(Types.TIMESTAMP, "DATETIME");
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping("BOOLEAN", "BIT", "BIT");
+ info.addNativeTypeMapping("DATALINK", "IMAGE", "LONGVARBINARY");
+
+ info.setDefaultSize(Types.CHAR, 254);
+ info.setDefaultSize(Types.VARCHAR, 254);
+ info.setDefaultSize(Types.BINARY, 254);
+ info.setDefaultSize(Types.VARBINARY, 254);
+
+ info.setStoresUpperCaseInCatalog(true);
+ info.setDateOverridesToTimestamp(true);
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(false);
+ info.setAutoIncrementUpdateAllowed(false);
+
+ // 2627 is raised by Sql Server on primary/unique key violations
+ // NOTE(review): confirm whether this value is matched as a sql state
+ // or as a native error code
+ primaryKeyViolationSqlStates = new String[] {"2627"};
+
+ ddlReader = new MsSqlDdlReader(log, this);
+ ddlBuilder = new MsSqlBuilder(log, this);
+
+ setDelimitedIdentifierModeOn(true);
+ }
+
+ /* Installs the Sql Server specific sql template over the platform's data source. */
+ @Override
+ protected void createSqlTemplate() {
+ this.sqlTemplate = new MsSqlJdbcSqlTemplate(dataSource, settings);
+ }
+
+ /* Returns the platform's database name constant. */
+ public String getName() {
+ return DATABASENAME;
+ }
+
+ /*
+ * Lazily caches the current database name via DB_NAME().
+ * NOTE(review): the lazy initialization is not synchronized — concurrent
+ * first calls may each run the query; confirm this is acceptable.
+ */
+ public String getDefaultCatalog() {
+ if (StringUtils.isBlank(defaultCatalog)) {
+ defaultCatalog = (String) getSqlTemplate().queryForObject("select DB_NAME()",
+ String.class);
+ }
+ return defaultCatalog;
+ }
+
+ /* Lazily caches the current schema name via SCHEMA_NAME(); same caveat as getDefaultCatalog. */
+ public String getDefaultSchema() {
+ if (StringUtils.isBlank(defaultSchema)) {
+ defaultSchema = (String) getSqlTemplate().queryForObject("select SCHEMA_NAME()",
+ String.class);
+ }
+ return defaultSchema;
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlBuilder.java
new file mode 100644
index 0000000000..0970d9779a
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlBuilder.java
@@ -0,0 +1,249 @@
+package org.jumpmind.db.platform.mysql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.commons.collections.set.ListOrderedSet;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.AddPrimaryKeyChange;
+import org.jumpmind.db.alter.ColumnChange;
+import org.jumpmind.db.alter.PrimaryKeyChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.RemovePrimaryKeyChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for MySQL.
+ */
+public class MySqlBuilder extends AbstractDdlBuilder {
+
+ public MySqlBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+
+ // we need to handle the backslash first otherwise the other
+ // already escaped sequences would be affected
+ addEscapedCharSequence("\\", "\\\\");
+ addEscapedCharSequence("\0", "\\0");
+ addEscapedCharSequence("'", "\\'");
+ addEscapedCharSequence("\"", "\\\"");
+ addEscapedCharSequence("\b", "\\b");
+ addEscapedCharSequence("\n", "\\n");
+ addEscapedCharSequence("\r", "\\r");
+ addEscapedCharSequence("\t", "\\t");
+ addEscapedCharSequence("\u001A", "\\Z");
+ addEscapedCharSequence("%", "\\%");
+ addEscapedCharSequence("_", "\\_");
+ }
+
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ ddl.append("DROP TABLE IF EXISTS ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
+ ddl.append("AUTO_INCREMENT");
+ }
+
+ @Override
+ protected boolean shouldGeneratePrimaryKeys(Column[] primaryKeyColumns) {
+ // mySQL requires primary key indication for autoincrement key columns
+ // I'm not sure why the default skips the pk statement if all are
+ // identity
+ return true;
+ }
+
+ /*
+ * Normally mysql will return the LAST_INSERT_ID as the column name for the
+ * inserted id. Since ddlutils expects the real column name of the field
+ * that is autoincrementing, the column has an alias of that column name.
+ */
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ String autoIncrementKeyName = "";
+ if (table.getAutoIncrementColumns().length > 0) {
+ autoIncrementKeyName = table.getAutoIncrementColumns()[0].getName();
+ }
+ return "SELECT LAST_INSERT_ID() " + autoIncrementKeyName;
+ }
+
+ @Override
+ protected void writeExternalForeignKeyDropStmt(Table table, ForeignKey foreignKey,
+ StringBuilder ddl) {
+ writeTableAlterStmt(table, ddl);
+ ddl.append("DROP FOREIGN KEY ");
+ printIdentifier(getForeignKeyName(table, foreignKey), ddl);
+ printEndOfStatement(ddl);
+
+ if (foreignKey.isAutoIndexPresent()) {
+ writeTableAlterStmt(table, ddl);
+ ddl.append("DROP INDEX ");
+ printIdentifier(getForeignKeyName(table, foreignKey), ddl);
+ printEndOfStatement(ddl);
+ }
+ }
+
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // in order to utilize the ALTER TABLE ADD COLUMN AFTER statement
+ // we have to apply the add column changes in the correct order
+ // thus we first gather all add column changes and then execute them
+ ArrayList addColumnChanges = new ArrayList();
+
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ addColumnChanges.add((AddColumnChange) change);
+ changeIt.remove();
+ }
+ }
+ for (Iterator changeIt = addColumnChanges.iterator(); changeIt.hasNext();) {
+ AddColumnChange addColumnChange = changeIt.next();
+
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+
+ ListOrderedSet changedColumns = new ListOrderedSet();
+
+ // we don't have to care about the order because the comparator will
+ // have ensured that a add primary key change comes after all necessary
+ // columns are present
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof RemoveColumnChange) {
+ processChange(currentModel, desiredModel, (RemoveColumnChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof AddPrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (AddPrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof PrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (PrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof RemovePrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (RemovePrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof ColumnChange) {
+ // we gather all changed columns because we can use the ALTER
+ // TABLE MODIFY COLUMN
+ // statement for them
+ changedColumns.add(((ColumnChange) change).getChangedColumn());
+ changeIt.remove();
+ }
+ }
+ for (Iterator columnIt = changedColumns.iterator(); columnIt.hasNext();) {
+ Column sourceColumn = columnIt.next();
+ Column targetColumn = targetTable.findColumn(sourceColumn.getName(),
+ platform.isDelimitedIdentifierModeOn());
+
+ processColumnChange(sourceTable, targetTable, sourceColumn, targetColumn, ddl);
+ }
+ }
+
+ /*
+ * Processes the addition of a column to a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD COLUMN ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ if (change.getPreviousColumn() != null) {
+ ddl.append(" AFTER ");
+ printIdentifier(getColumnName(change.getPreviousColumn()), ddl);
+ } else {
+ ddl.append(" FIRST");
+ }
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP COLUMN ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a primary key from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemovePrimaryKeyChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP PRIMARY KEY");
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the change of the primary key of a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ PrimaryKeyChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP PRIMARY KEY");
+ printEndOfStatement(ddl);
+ writeExternalPrimaryKeysCreateStmt(change.getChangedTable(),
+ change.getNewPrimaryKeyColumns(), ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes a change to a column.
+ */
+ protected void processColumnChange(Table sourceTable, Table targetTable, Column sourceColumn,
+ Column targetColumn, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(sourceTable.getName()), ddl);
+ printIndent(ddl);
+ ddl.append("MODIFY COLUMN ");
+ writeColumn(targetTable, targetColumn, ddl);
+ printEndOfStatement(ddl);
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlDdlReader.java
new file mode 100644
index 0000000000..f84270a440
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlDdlReader.java
@@ -0,0 +1,109 @@
+package org.jumpmind.db.platform.mysql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a MySql database.
+ */
+public class MySqlDdlReader extends AbstractJdbcDdlReader {
+
+ public MySqlDdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ setDefaultTablePattern(null);
+ }
+
+ @Override
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+ Map values) throws SQLException {
+ // TODO This needs some more work, since table names can be case
+ // sensitive or lowercase
+ // depending on the platform (really cute).
+ // See http://dev.mysql.com/doc/refman/4.1/en/name-case-sensitivity.html
+ // for more info.
+
+ Table table = super.readTable(connection, metaData, values);
+
+ if (table != null) {
+ determineAutoIncrementFromResultSetMetaData(connection, table,
+ table.getPrimaryKeyColumns());
+ }
+ return table;
+ }
+
+ @Override
+ protected Integer overrideJdbcTypeForColumn(Map values) {
+ String typeName = (String) values.get("TYPE_NAME");
+ if ("YEAR".equals(typeName)) {
+ // it is safe to map a YEAR to INTEGER
+ return Types.INTEGER;
+ } else {
+ return super.overrideJdbcTypeForColumn(values);
+ }
+ }
+
+ @Override
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ Column column = super.readColumn(metaData, values);
+
+ // MySQL converts illegal date/time/timestamp values to
+ // "0000-00-00 00:00:00", but this
+ // is an illegal ISO value, so we replace it with NULL
+ if ((column.getTypeCode() == Types.TIMESTAMP)
+ && "0000-00-00 00:00:00".equals(column.getDefaultValue())) {
+ column.setDefaultValue(null);
+ }
+ // make sure the defaultvalue is null when an empty is returned.
+ if ("".equals(column.getDefaultValue())) {
+ column.setDefaultValue(null);
+ }
+ return column;
+ }
+
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) {
+ // MySql defines a unique index "PRIMARY" for primary keys
+ return "PRIMARY".equals(index.getName());
+ }
+
+ @Override
+ protected boolean isInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index) {
+ // MySql defines a non-unique index of the same name as the fk
+ return getPlatform().getDdlBuilder().getForeignKeyName(table, fk).equals(index.getName());
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlPlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlPlatform.java
new file mode 100644
index 0000000000..63c85eaa78
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/MySqlPlatform.java
@@ -0,0 +1,132 @@
+package org.jumpmind.db.platform.mysql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for MySQL.
+ */
+public class MySqlPlatform extends AbstractJdbcDatabasePlatform {
+
+    /* Database name of this platform. */
+    public static final String DATABASENAME = "MySQL";
+
+    /* The standard MySQL jdbc driver. */
+    public static final String JDBC_DRIVER = "com.mysql.jdbc.Driver";
+
+    /* The old MySQL jdbc driver. */
+    public static final String JDBC_DRIVER_OLD = "org.gjt.mm.mysql.Driver";
+
+    /* The subprotocol used by the standard MySQL driver. */
+    public static final String JDBC_SUBPROTOCOL = "mysql";
+
+    /*
+     * Creates a new platform instance.
+     */
+    public MySqlPlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+        super(dataSource, settings, log);
+
+        info.setMaxIdentifierLength(64);
+        info.setNullAsDefaultValueRequired(true);
+        info.setDefaultValuesForLongTypesSupported(false);
+        // see
+        // http://dev.mysql.com/doc/refman/4.1/en/example-auto-increment.html
+        info.setNonPKIdentityColumnsSupported(false);
+        // MySql 5.0 returns an empty string for default values for pk columns
+        // (MySql 4 returned synthetic defaults), so the flag is off here.
+        // Note: earlier code first set this to true and then overwrote it with
+        // false; only the final (false) value was ever effective.
+        info.setSyntheticDefaultValueForRequiredReturned(false);
+        info.setCommentPrefix("#");
+        // Double quotes are only allowed for delimiting identifiers if the
+        // server SQL mode includes ANSI_QUOTES
+        info.setDelimiterToken("`");
+
+        info.addNativeTypeMapping(Types.ARRAY, "LONGBLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.BIT, "TINYINT(1)");
+        info.addNativeTypeMapping(Types.BLOB, "LONGBLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.CLOB, "LONGTEXT", Types.LONGVARCHAR);
+        info.addNativeTypeMapping(Types.DISTINCT, "LONGBLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.FLOAT, "DOUBLE", Types.DOUBLE);
+        info.addNativeTypeMapping(Types.JAVA_OBJECT, "LONGBLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.LONGVARBINARY, "MEDIUMBLOB");
+        info.addNativeTypeMapping(Types.LONGVARCHAR, "MEDIUMTEXT");
+        info.addNativeTypeMapping(Types.NULL, "MEDIUMBLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.NUMERIC, "DECIMAL", Types.DECIMAL);
+        info.addNativeTypeMapping(Types.OTHER, "LONGBLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.REAL, "FLOAT");
+        info.addNativeTypeMapping(Types.REF, "MEDIUMBLOB", Types.LONGVARBINARY);
+        info.addNativeTypeMapping(Types.STRUCT, "LONGBLOB", Types.LONGVARBINARY);
+        // Since TIMESTAMP is not a stable datatype yet, and does not support a
+        // higher precision than DATETIME (year to seconds) as of MySQL 5, we
+        // map the JDBC type here to DATETIME
+        // TODO: Make this configurable
+        info.addNativeTypeMapping(Types.TIMESTAMP, "DATETIME");
+        // In MySql, TINYINT has only a range of -128 to 127
+        info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+        info.addNativeTypeMapping("BOOLEAN", "TINYINT(1)", "BIT");
+        info.addNativeTypeMapping("DATALINK", "MEDIUMBLOB", "LONGVARBINARY");
+
+        info.setDefaultSize(Types.CHAR, 254);
+        info.setDefaultSize(Types.VARCHAR, 254);
+        info.setDefaultSize(Types.BINARY, 254);
+        info.setDefaultSize(Types.VARBINARY, 254);
+        info.setIdentifierQuoteString("`");
+        info.setNonBlankCharColumnSpacePadded(false);
+        info.setBlankCharColumnSpacePadded(false);
+        info.setCharColumnSpaceTrimmed(true);
+        info.setEmptyStringNulled(false);
+
+        setDelimitedIdentifierModeOn(true);
+
+        // 1062 is MySql's duplicate entry error raised on unique/primary key
+        // violations
+        primaryKeyViolationCodes = new int[] {1062};
+
+        ddlReader = new MySqlDdlReader(log, this);
+        ddlBuilder = new MySqlBuilder(log, this);
+    }
+
+    public String getName() {
+        return DATABASENAME;
+    }
+
+    /* MySql has no schema concept separate from the catalog (database). */
+    public String getDefaultSchema() {
+        return null;
+    }
+
+    /*
+     * Lazily caches the name of the current database.
+     * NOTE(review): the lazy initialization is not synchronized — concurrent
+     * first calls may each run the query; confirm this is acceptable.
+     */
+    public String getDefaultCatalog() {
+        if (StringUtils.isBlank(defaultCatalog)) {
+            defaultCatalog = getSqlTemplate().queryForObject("select database()", String.class);
+        }
+        return defaultCatalog;
+    }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/package.html b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/package.html
new file mode 100644
index 0000000000..ddbadae1ca
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/mysql/package.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+ This package contains the platform implementation for the
+ MySQL database.
+
+
+
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OracleBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OracleBuilder.java
new file mode 100644
index 0000000000..64fc9668bd
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OracleBuilder.java
@@ -0,0 +1,423 @@
+package org.jumpmind.db.platform.oracle;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.oro.text.regex.MalformedPatternException;
+import org.apache.oro.text.regex.Pattern;
+import org.apache.oro.text.regex.PatternCompiler;
+import org.apache.oro.text.regex.Perl5Compiler;
+import org.apache.oro.text.regex.Perl5Matcher;
+import org.jumpmind.db.DdlException;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.AddPrimaryKeyChange;
+import org.jumpmind.db.alter.PrimaryKeyChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.RemovePrimaryKeyChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.db.platform.PlatformUtils;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for Oracle.
+ */
+public class OracleBuilder extends AbstractDdlBuilder {
+
+ /* The regular expression pattern for ISO dates, i.e. 'YYYY-MM-DD'. */
+ private Pattern isoDatePattern;
+
+ /* The regular expression pattern for ISO times, i.e. 'HH:MI:SS'. */
+ private Pattern isoTimePattern;
+
+ /*
+ * The regular expression pattern for ISO timestamps, i.e. 'YYYY-MM-DD
+ * HH:MI:SS.fffffffff'.
+ */
+ private Pattern isoTimestampPattern;
+
+ public OracleBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+
+ addEscapedCharSequence("'", "''");
+
+ PatternCompiler compiler = new Perl5Compiler();
+
+ try {
+ isoDatePattern = compiler.compile("\\d{4}\\-\\d{2}\\-\\d{2}");
+ isoTimePattern = compiler.compile("\\d{2}:\\d{2}:\\d{2}");
+ isoTimestampPattern = compiler
+ .compile("\\d{4}\\-\\d{2}\\-\\d{2} \\d{2}:\\d{2}:\\d{2}[\\.\\d{1,8}]?");
+ } catch (MalformedPatternException ex) {
+ throw new DdlException(ex);
+ }
+ }
+
+ @Override
+ public void createTable(Table table, StringBuilder ddl) {
+ // lets create any sequences
+ Column[] columns = table.getAutoIncrementColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ createAutoIncrementSequence(table, columns[idx], ddl);
+ }
+
+ super.createTable(table, ddl);
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ createAutoIncrementTrigger(table, columns[idx], ddl);
+ }
+ }
+
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ // The only difference to the Oracle 8/9 variant is the purge which
+ // prevents the
+ // table from being moved to the recycle bin (which is new in Oracle 10)
+ Column[] columns = table.getAutoIncrementColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ dropAutoIncrementTrigger(table, columns[idx], ddl);
+ dropAutoIncrementSequence(table, columns[idx], ddl);
+ }
+
+ ddl.append("DROP TABLE ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ ddl.append(" CASCADE CONSTRAINTS PURGE");
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Creates the sequence necessary for the auto-increment of the given
+ * column.
+ */
+ protected void createAutoIncrementSequence(Table table, Column column, StringBuilder ddl) {
+ ddl.append("CREATE SEQUENCE ");
+ printIdentifier(getConstraintName("seq", table, column.getName(), null), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Creates the trigger necessary for the auto-increment of the given column.
+ */
+ protected void createAutoIncrementTrigger(Table table, Column column, StringBuilder ddl) {
+ String columnName = getColumnName(column);
+ String triggerName = getConstraintName("trg", table, column.getName(), null);
+
+ if (platform.isScriptModeOn()) {
+ // For the script, we output a more nicely formatted version
+ ddl.append("CREATE OR REPLACE TRIGGER ");
+ printlnIdentifier(triggerName, ddl);
+ ddl.append("BEFORE INSERT ON ");
+ printlnIdentifier(getTableName(table.getName()), ddl);
+ ddl.append("FOR EACH ROW WHEN (new.");
+ printIdentifier(columnName, ddl);
+ println(" IS NULL)", ddl);
+ println("BEGIN", ddl);
+ ddl.append(" SELECT ");
+ printIdentifier(getConstraintName("seq", table, column.getName(), null), ddl);
+ ddl.append(".nextval INTO :new.");
+ printIdentifier(columnName, ddl);
+ ddl.append(" FROM dual");
+ println(platform.getPlatformInfo().getSqlCommandDelimiter(), ddl);
+ ddl.append("END");
+ println(platform.getPlatformInfo().getSqlCommandDelimiter(), ddl);
+ println("/", ddl);
+ println(ddl);
+ } else {
+ // note that the BEGIN ... SELECT ... END; is all in one line and
+ // does
+ // not contain a semicolon except for the END-one
+ // this way, the tokenizer will not split the statement before the
+ // END
+ ddl.append("CREATE OR REPLACE TRIGGER ");
+ printIdentifier(triggerName, ddl);
+ ddl.append(" BEFORE INSERT ON ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ ddl.append(" FOR EACH ROW WHEN (new.");
+ printIdentifier(columnName, ddl);
+ println(" IS NULL)", ddl);
+ ddl.append("BEGIN SELECT ");
+ printIdentifier(getConstraintName("seq", table, column.getName(), null), ddl);
+ ddl.append(".nextval INTO :new.");
+ printIdentifier(columnName, ddl);
+ ddl.append(" FROM dual");
+ ddl.append(platform.getPlatformInfo().getSqlCommandDelimiter());
+ ddl.append(" END");
+ // It is important that there is a semicolon at the end of the
+ // statement (or more
+ // precisely, at the end of the PL/SQL block), and thus we put two
+ // semicolons here
+ // because the tokenizer will remove the one at the end
+ ddl.append(platform.getPlatformInfo().getSqlCommandDelimiter());
+ printEndOfStatement(ddl);
+ }
+ }
+
+ /*
+ * Drops the sequence used for the auto-increment of the given column.
+ */
+ protected void dropAutoIncrementSequence(Table table, Column column, StringBuilder ddl) {
+ ddl.append("DROP SEQUENCE ");
+ printIdentifier(getConstraintName("seq", table, column.getName(), null), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Drops the trigger used for the auto-increment of the given column.
+ */
+ protected void dropAutoIncrementTrigger(Table table, Column column, StringBuilder ddl) {
+ ddl.append("DROP TRIGGER ");
+ printIdentifier(getConstraintName("trg", table, column.getName(), null), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ protected void createTemporaryTable(Database database, Table table, StringBuilder ddl) {
+ createTable(table, ddl);
+ }
+
+ @Override
+ protected void dropTemporaryTable(Database database, Table table, StringBuilder ddl) {
+ dropTable(table, ddl);
+ }
+
+ @Override
+ public void dropExternalForeignKeys(Table table, StringBuilder ddl) {
+ // no need to as we drop the table with CASCASE CONSTRAINTS
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ // Index names in Oracle are unique to a schema and hence Oracle does
+ // not
+ // use the ON clause
+ ddl.append("DROP INDEX ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ protected void printDefaultValue(Object defaultValue, int typeCode, StringBuilder ddl) {
+ if (defaultValue != null) {
+ String defaultValueStr = defaultValue.toString();
+ boolean shouldUseQuotes = !TypeMap.isNumericType(typeCode)
+ && !defaultValueStr.startsWith("TO_DATE(");
+
+ if (shouldUseQuotes) {
+ // characters are only escaped when within a string literal
+ ddl.append(platform.getPlatformInfo().getValueQuoteToken());
+ ddl.append(escapeStringValue(defaultValueStr));
+ ddl.append(platform.getPlatformInfo().getValueQuoteToken());
+ } else {
+ ddl.append(defaultValueStr);
+ }
+ }
+ }
+
+ @Override
+ protected String getNativeDefaultValue(Column column) {
+ if ((column.getTypeCode() == Types.BIT)
+ || (PlatformUtils.supportsJava14JdbcTypes() && (column.getTypeCode() == PlatformUtils
+ .determineBooleanTypeCode()))) {
+ return getDefaultValueHelper().convert(column.getDefaultValue(), column.getTypeCode(),
+ Types.SMALLINT).toString();
+ }
+ // Oracle does not accept ISO formats, so we have to convert an ISO spec
+ // if we find one
+ // But these are the only formats that we make sure work, every other
+ // format has to be database-dependent
+ // and thus the user has to ensure that it is correct
+ else if (column.getTypeCode() == Types.DATE) {
+ if (new Perl5Matcher().matches(column.getDefaultValue(), isoDatePattern)) {
+ return "TO_DATE('" + column.getDefaultValue() + "', 'YYYY-MM-DD')";
+ }
+ } else if (column.getTypeCode() == Types.TIME) {
+ if (new Perl5Matcher().matches(column.getDefaultValue(), isoTimePattern)) {
+ return "TO_DATE('" + column.getDefaultValue() + "', 'HH24:MI:SS')";
+ }
+ } else if (column.getTypeCode() == Types.TIMESTAMP) {
+ if (new Perl5Matcher().matches(column.getDefaultValue(), isoTimestampPattern)) {
+ return "TO_DATE('" + column.getDefaultValue() + "', 'YYYY-MM-DD HH24:MI:SS')";
+ }
+ }
+ return super.getNativeDefaultValue(column);
+ }
+
+ @Override
+ protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
+ // we're using sequences instead
+ }
+
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ Column[] columns = table.getAutoIncrementColumns();
+
+ if (columns.length > 0) {
+ StringBuffer result = new StringBuffer();
+
+ result.append("SELECT ");
+ for (int idx = 0; idx < columns.length; idx++) {
+ if (idx > 0) {
+ result.append(",");
+ }
+ result.append(getDelimitedIdentifier(getConstraintName("seq", table,
+ columns[idx].getName(), null)));
+ result.append(".currval");
+ }
+ result.append(" FROM dual");
+ return result.toString();
+ } else {
+ return null;
+ }
+ }
+
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // While Oracle has an ALTER TABLE MODIFY statement, it is somewhat
+ // limited
+ // esp. if there is data in the table, so we don't use it
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ AddColumnChange addColumnChange = (AddColumnChange) change;
+
+ // Oracle can only add not insert columns
+ // Also, we cannot add NOT NULL columns unless they have a
+ // default value
+ if (!addColumnChange.isAtEnd()
+ || (addColumnChange.getNewColumn().isRequired() && (addColumnChange
+ .getNewColumn().getDefaultValue() == null))) {
+ // we need to rebuild the full table
+ return;
+ }
+ }
+ }
+
+ // First we drop primary keys as necessary
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof RemovePrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (RemovePrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof PrimaryKeyChange) {
+ PrimaryKeyChange pkChange = (PrimaryKeyChange) change;
+ RemovePrimaryKeyChange removePkChange = new RemovePrimaryKeyChange(
+ pkChange.getChangedTable(), pkChange.getOldPrimaryKeyColumns());
+
+ processChange(currentModel, desiredModel, removePkChange, ddl);
+ }
+ }
+
+ // Next we add/remove columns
+ // While Oracle has an ALTER TABLE MODIFY statement, it is somewhat
+ // limited
+ // esp. if there is data in the table, so we don't use it
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ processChange(currentModel, desiredModel, (AddColumnChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof RemoveColumnChange) {
+ processChange(currentModel, desiredModel, (RemoveColumnChange) change, ddl);
+ changeIt.remove();
+ }
+ }
+ // Finally we add primary keys
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddPrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (AddPrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof PrimaryKeyChange) {
+ PrimaryKeyChange pkChange = (PrimaryKeyChange) change;
+ AddPrimaryKeyChange addPkChange = new AddPrimaryKeyChange(
+ pkChange.getChangedTable(), pkChange.getNewPrimaryKeyColumns());
+
+ processChange(currentModel, desiredModel, addPkChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ /*
+ * Processes the addition of a column to a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ printEndOfStatement(ddl);
+ if (change.getNewColumn().isAutoIncrement()) {
+ createAutoIncrementSequence(change.getChangedTable(), change.getNewColumn(), ddl);
+ createAutoIncrementTrigger(change.getChangedTable(), change.getNewColumn(), ddl);
+ }
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ if (change.getColumn().isAutoIncrement()) {
+ dropAutoIncrementTrigger(change.getChangedTable(), change.getColumn(), ddl);
+ dropAutoIncrementSequence(change.getChangedTable(), change.getColumn(), ddl);
+ }
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP COLUMN ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a primary key from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemovePrimaryKeyChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP PRIMARY KEY");
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OracleDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OracleDdlReader.java
new file mode 100644
index 0000000000..11335e2b00
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OracleDdlReader.java
@@ -0,0 +1,380 @@
+package org.jumpmind.db.platform.oracle;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.Date;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.oro.text.regex.MalformedPatternException;
+import org.apache.oro.text.regex.Pattern;
+import org.apache.oro.text.regex.PatternCompiler;
+import org.apache.oro.text.regex.PatternMatcher;
+import org.apache.oro.text.regex.Perl5Compiler;
+import org.apache.oro.text.regex.Perl5Matcher;
+import org.jumpmind.db.DdlException;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.IDdlBuilder;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from an Oracle 8 database.
+ */
+public class OracleDdlReader extends AbstractJdbcDdlReader {
+
+ /* The regular expression pattern for the Oracle conversion of ISO dates. */
+ private Pattern oracleIsoDatePattern;
+
+ /* The regular expression pattern for the Oracle conversion of ISO times. */
+ private Pattern oracleIsoTimePattern;
+
+ /*
+ * The regular expression pattern for the Oracle conversion of ISO
+ * timestamps.
+ */
+ private Pattern oracleIsoTimestampPattern;
+
+ /*
+ * Creates a new reader and pre-compiles the TO_DATE(...) patterns used to
+ * map Oracle default values back to their ISO form.
+ */
+ public OracleDdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ setDefaultTablePattern("%");
+
+ PatternCompiler compiler = new Perl5Compiler();
+
+ try {
+ oracleIsoDatePattern = compiler.compile("TO_DATE\\('([^']*)'\\, 'YYYY\\-MM\\-DD'\\)");
+ oracleIsoTimePattern = compiler.compile("TO_DATE\\('([^']*)'\\, 'HH24:MI:SS'\\)");
+ oracleIsoTimestampPattern = compiler
+ .compile("TO_DATE\\('([^']*)'\\, 'YYYY\\-MM\\-DD HH24:MI:SS'\\)");
+ } catch (MalformedPatternException ex) {
+ throw new DdlException(ex);
+ }
+ }
+
+ /*
+ * Reads a single table from the JDBC metadata row, filtering out objects
+ * that live in the Oracle 10 recycle bin and system tables (names
+ * containing '$'), then determining the auto-increment columns.
+ * NOTE(review): 'values' presumably maps metadata column names to values
+ * (e.g. Map<String, Object>); generic parameters look stripped here --
+ * confirm against the superclass signature.
+ */
+ @Override
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+ Map values) throws SQLException {
+ // Oracle 10 added the recycle bin which contains dropped database
+ // objects not yet purged
+ // Since we don't want entries from the recycle bin, we filter them out
+ boolean tableHasBeenDeleted = isTableInRecycleBin(connection, values);
+
+ if (!tableHasBeenDeleted) {
+ String tableName = (String) values.get("TABLE_NAME");
+
+ // system table ?
+ if (tableName.indexOf('$') > 0) {
+ return null;
+ }
+
+ Table table = super.readTable(connection, metaData, values);
+ if (table != null) {
+ determineAutoIncrementColumns(connection, table);
+ }
+
+ return table;
+ } else {
+ return null;
+ }
+ }
+
+ /*
+ * Returns true if the table named in the metadata row is listed in
+ * Oracle's RECYCLEBIN view (i.e. it has been dropped but not yet purged).
+ */
+ protected boolean isTableInRecycleBin(Connection connection, Map values)
+ throws SQLException {
+ ResultSet rs = null;
+ PreparedStatement stmt = null;
+ try {
+ stmt = connection.prepareStatement("SELECT * FROM RECYCLEBIN WHERE OBJECT_NAME=?");
+ stmt.setString(1, (String) values.get("TABLE_NAME"));
+
+ rs = stmt.executeQuery();
+ return rs.next();
+ } finally {
+ close(rs);
+ close(stmt);
+ }
+ }
+
+ /*
+ * Maps Oracle-specific native type names (DATE, TIMESTAMP(n), NVARCHAR,
+ * NCLOB, BINARY_FLOAT, BINARY_DOUBLE, ...) onto the corresponding JDBC
+ * type codes; everything else is delegated to the superclass.
+ */
+ @Override
+ protected Integer overrideJdbcTypeForColumn(Map values) {
+ String typeName = (String) values.get("TYPE_NAME");
+ if (typeName != null && typeName.startsWith("DATE")) {
+ return Types.DATE;
+ } else if (typeName != null && typeName.startsWith("TIMESTAMP")) {
+ // This is for Oracle's TIMESTAMP(9)
+ return Types.TIMESTAMP;
+ } else if (typeName != null && typeName.startsWith("NVARCHAR")) {
+ // This is for Oracle's NVARCHAR type
+ return Types.VARCHAR;
+ } else if (typeName != null && typeName.startsWith("LONGNVARCHAR")) {
+ return Types.LONGVARCHAR;
+ } else if (typeName != null && typeName.startsWith("NCHAR")) {
+ return Types.CHAR;
+ } else if (typeName != null && typeName.startsWith("NCLOB")) {
+ return Types.CLOB;
+ } else if (typeName != null && typeName.startsWith("BINARY_FLOAT")) {
+ return Types.FLOAT;
+ } else if (typeName != null && typeName.startsWith("BINARY_DOUBLE")) {
+ return Types.DOUBLE;
+ } else {
+ return super.overrideJdbcTypeForColumn(values);
+ }
+ }
+
+ /*
+ * Back-maps the NUMBER/FLOAT sizes the Oracle driver reports to the JDBC
+ * types originally written, and reverses the TO_DATE(...) ISO-format
+ * adaptation of date/time default values.
+ */
+ @Override
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ Column column = super.readColumn(metaData, values);
+ if (column.getTypeCode() == Types.DECIMAL) {
+ // We're back-mapping the NUMBER columns returned by Oracle
+ // Note that the JDBC driver returns DECIMAL for these NUMBER
+ // columns
+ switch (column.getSizeAsInt()) {
+ case 1:
+ if (column.getScale() == 0) {
+ column.setTypeCode(Types.BIT);
+ }
+ break;
+ case 3:
+ if (column.getScale() == 0) {
+ column.setTypeCode(Types.TINYINT);
+ }
+ break;
+ case 5:
+ if (column.getScale() == 0) {
+ column.setTypeCode(Types.SMALLINT);
+ }
+ break;
+ case 18:
+ column.setTypeCode(Types.REAL);
+ break;
+ case 22:
+ if (column.getScale() == 0) {
+ column.setTypeCode(Types.INTEGER);
+ }
+ break;
+ case 38:
+ if (column.getScale() == 0) {
+ column.setTypeCode(Types.BIGINT);
+ } else {
+ column.setTypeCode(Types.DOUBLE);
+ }
+ break;
+ }
+ } else if (column.getTypeCode() == Types.FLOAT) {
+ // Same for REAL, FLOAT, DOUBLE PRECISION, which all back-map to
+ // FLOAT but with
+ // different sizes (63 for REAL, 126 for FLOAT/DOUBLE PRECISION)
+ switch (column.getSizeAsInt()) {
+ case 63:
+ column.setTypeCode(Types.REAL);
+ break;
+ case 126:
+ column.setTypeCode(Types.DOUBLE);
+ break;
+ }
+ } else if ((column.getTypeCode() == Types.DATE)
+ || (column.getTypeCode() == Types.TIMESTAMP)) {
+ // we also reverse the ISO-format adaptation, and adjust the default
+ // value to timestamp
+ if (column.getDefaultValue() != null) {
+ PatternMatcher matcher = new Perl5Matcher();
+ Timestamp timestamp = null;
+
+ if (matcher.matches(column.getDefaultValue(), oracleIsoTimestampPattern)) {
+ String timestampVal = matcher.getMatch().group(1);
+
+ timestamp = Timestamp.valueOf(timestampVal);
+ } else if (matcher.matches(column.getDefaultValue(), oracleIsoDatePattern)) {
+ String dateVal = matcher.getMatch().group(1);
+
+ timestamp = new Timestamp(Date.valueOf(dateVal).getTime());
+ } else if (matcher.matches(column.getDefaultValue(), oracleIsoTimePattern)) {
+ String timeVal = matcher.getMatch().group(1);
+
+ timestamp = new Timestamp(Time.valueOf(timeVal).getTime());
+ }
+ if (timestamp != null) {
+ column.setDefaultValue(timestamp.toString());
+ }
+ }
+ } else if (TypeMap.isTextType(column.getTypeCode())) {
+ column.setDefaultValue(unescape(column.getDefaultValue(), "'", "''"));
+ }
+ return column;
+ }
+
+ /*
+ * Helper method that determines the auto increment status of each column
+ * using Oracle's user_triggers and user_sequences views (the original
+ * comment mentioned Firebird, which was a copy-paste error).
+ *
+ * @param table The table
+ */
+ protected void determineAutoIncrementColumns(Connection connection, Table table)
+ throws SQLException {
+ Column[] columns = table.getColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ columns[idx].setAutoIncrement(isAutoIncrement(connection, table, columns[idx]));
+ }
+ }
+
+ /*
+ * Tries to determine whether the given column is an identity column.
+ *
+ * @param table The table
+ *
+ * @param column The column
+ *
+ * @return true if the column is an identity column
+ */
+ protected boolean isAutoIncrement(Connection connection, Table table, Column column)
+ throws SQLException {
+ // TODO: For now, we only check whether there is a sequence & trigger as
+ // generated by DdlUtils
+ // But once sequence/trigger support is in place, it might be possible
+ // to 'parse' the
+ // trigger body (via SELECT trigger_name, trigger_body FROM
+ // user_triggers) in order to
+ // determine whether it fits our auto-increment definition
+ PreparedStatement prepStmt = null;
+ IDdlBuilder builder = getPlatform().getDdlBuilder();
+ String triggerName = builder.getConstraintName("trg", table, column.getName(), null);
+ String seqName = builder.getConstraintName("seq", table, column.getName(), null);
+
+ if (!getPlatform().isDelimitedIdentifierModeOn()) {
+ triggerName = triggerName.toUpperCase();
+ seqName = seqName.toUpperCase();
+ }
+ try {
+ prepStmt = connection
+ .prepareStatement("SELECT * FROM user_triggers WHERE trigger_name = ?");
+ prepStmt.setString(1, triggerName);
+
+ ResultSet resultSet = prepStmt.executeQuery();
+
+ if (!resultSet.next()) {
+ return false;
+ }
+ // we have a trigger, so lets check the sequence
+ // (closing the statement also closes its open result set)
+ prepStmt.close();
+
+ prepStmt = connection
+ .prepareStatement("SELECT * FROM user_sequences WHERE sequence_name = ?");
+ prepStmt.setString(1, seqName);
+
+ resultSet = prepStmt.executeQuery();
+ return resultSet.next();
+ } finally {
+ if (prepStmt != null) {
+ prepStmt.close();
+ }
+ }
+ }
+
+ @Override
+ protected Collection readIndices(Connection connection,
+ DatabaseMetaDataWrapper metaData, String tableName) throws SQLException {
+ // Oracle bug 4999817 causes a table analyze to execute in response to a
+ // call to
+ // DatabaseMetaData#getIndexInfo.
+ // The bug is fixed in driver version 10.2.0.4. The bug is present in at
+ // least
+ // driver versions 10.2.0.1.0, 10.1.0.2.0, and 9.2.0.5.
+ // To avoid this bug, we will access user_indexes view.
+ // This also allows us to filter system-generated indices which are
+ // identified by either
+ // having GENERATED='Y' in the query result, or by their index names
+ // being equal to the
+ // name of the primary key of the table
+
+ StringBuilder query = new StringBuilder();
+
+ query.append("SELECT a.INDEX_NAME, a.INDEX_TYPE, a.UNIQUENESS, b.COLUMN_NAME, b.COLUMN_POSITION FROM USER_INDEXES a, USER_IND_COLUMNS b WHERE ");
+ query.append("a.TABLE_NAME=? AND a.GENERATED=? AND a.TABLE_TYPE=? AND a.TABLE_NAME=b.TABLE_NAME AND a.INDEX_NAME=b.INDEX_NAME AND ");
+ query.append("a.INDEX_NAME NOT IN (SELECT DISTINCT c.CONSTRAINT_NAME FROM USER_CONSTRAINTS c WHERE c.CONSTRAINT_TYPE=? AND c.TABLE_NAME=a.TABLE_NAME");
+ if (metaData.getSchemaPattern() != null) {
+ query.append(" AND c.OWNER LIKE ?) AND a.TABLE_OWNER LIKE ?");
+ } else {
+ query.append(")");
+ }
+
+ Map indices = new LinkedHashMap();
+ PreparedStatement stmt = null;
+
+ try {
+ stmt = connection.prepareStatement(query.toString());
+ stmt.setString(
+ 1,
+ getPlatform().isDelimitedIdentifierModeOn() ? tableName : tableName
+ .toUpperCase());
+ stmt.setString(2, "N");
+ stmt.setString(3, "TABLE");
+ stmt.setString(4, "P");
+ if (metaData.getSchemaPattern() != null) {
+ stmt.setString(5, metaData.getSchemaPattern().toUpperCase());
+ stmt.setString(6, metaData.getSchemaPattern().toUpperCase());
+ }
+
+ ResultSet rs = stmt.executeQuery();
+ Map values = new HashMap();
+
+ while (rs.next()) {
+ String name = rs.getString(1);
+ String type = rs.getString(2);
+ // Only read in normal oracle indexes
+ if (type.startsWith("NORMAL")) {
+ values.put("INDEX_TYPE", new Short(DatabaseMetaData.tableIndexOther));
+ values.put("INDEX_NAME", name);
+ values.put("NON_UNIQUE",
+ "UNIQUE".equalsIgnoreCase(rs.getString(3)) ? Boolean.FALSE
+ : Boolean.TRUE);
+ values.put("COLUMN_NAME", rs.getString(4));
+ values.put("ORDINAL_POSITION", new Short(rs.getShort(5)));
+
+ readIndex(metaData, values, indices);
+ } else {
+ log.warn("Skipping index " + name + " of type " + type);
+ }
+ }
+ } finally {
+ if (stmt != null) {
+ stmt.close();
+ }
+ }
+ return indices.values();
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OraclePlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OraclePlatform.java
new file mode 100644
index 0000000000..28ba883bd9
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/OraclePlatform.java
@@ -0,0 +1,135 @@
+package org.jumpmind.db.platform.oracle;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.db.sql.jdbc.JdbcSqlTemplate;
+import org.jumpmind.log.Log;
+import org.springframework.jdbc.support.lob.OracleLobHandler;
+
+/*
+ * The platform for Oracle 8.
+ */
+public class OraclePlatform extends AbstractJdbcDatabasePlatform {
+
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "Oracle";
+
+ /* The standard Oracle jdbc driver. */
+ public static final String JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
+
+ /* The old Oracle jdbc driver. */
+ public static final String JDBC_DRIVER_OLD = "oracle.jdbc.dnlddriver.OracleDriver";
+
+ /* The thin subprotocol used by the standard Oracle driver. */
+ public static final String JDBC_SUBPROTOCOL_THIN = "oracle:thin";
+
+ /* The thin subprotocol used by the standard Oracle driver. */
+ public static final String JDBC_SUBPROTOCOL_OCI8 = "oracle:oci8";
+
+ /* The old thin subprotocol used by the standard Oracle driver. */
+ public static final String JDBC_SUBPROTOCOL_THIN_OLD = "oracle:dnldthin";
+
+ /*
+ * Creates a new platform instance, configuring the JDBC-to-Oracle native
+ * type mappings, identifier limits, and the Oracle DDL reader/builder.
+ */
+ public OraclePlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.setMaxIdentifierLength(30);
+ info.setIdentityStatusReadingSupported(false);
+
+ // Note that the back-mappings are partially done by the model reader,
+ // not the driver
+ info.addNativeTypeMapping(Types.ARRAY, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.BIGINT, "NUMBER(38)");
+ info.addNativeTypeMapping(Types.BINARY, "RAW", Types.VARBINARY);
+ info.addNativeTypeMapping(Types.BIT, "NUMBER(1)");
+ info.addNativeTypeMapping(Types.DATE, "DATE", Types.TIMESTAMP);
+ info.addNativeTypeMapping(Types.DECIMAL, "NUMBER");
+ info.addNativeTypeMapping(Types.DISTINCT, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.DOUBLE, "DOUBLE PRECISION");
+ info.addNativeTypeMapping(Types.FLOAT, "FLOAT", Types.DOUBLE);
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.LONGVARBINARY, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.LONGVARCHAR, "CLOB", Types.CLOB);
+ info.addNativeTypeMapping(Types.NULL, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.NUMERIC, "NUMBER", Types.DECIMAL);
+ info.addNativeTypeMapping(Types.INTEGER, "NUMBER(22)", Types.DECIMAL);
+ info.addNativeTypeMapping(Types.OTHER, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.REF, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.SMALLINT, "NUMBER(5)");
+ info.addNativeTypeMapping(Types.STRUCT, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.TIME, "DATE", Types.DATE);
+ info.addNativeTypeMapping(Types.TIMESTAMP, "TIMESTAMP");
+ info.addNativeTypeMapping(Types.TINYINT, "NUMBER(3)");
+ info.addNativeTypeMapping(Types.VARBINARY, "RAW");
+ info.addNativeTypeMapping(Types.VARCHAR, "VARCHAR2");
+
+ info.addNativeTypeMapping("BOOLEAN", "NUMBER(1)", "BIT");
+ info.addNativeTypeMapping("DATALINK", "BLOB", "BLOB");
+
+ info.setDefaultSize(Types.CHAR, 254);
+ info.setDefaultSize(Types.VARCHAR, 254);
+ info.setDefaultSize(Types.BINARY, 254);
+ info.setDefaultSize(Types.VARBINARY, 254);
+
+ info.setStoresUpperCaseInCatalog(true);
+ info.setDateOverridesToTimestamp(true);
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(true);
+
+ // ORA-00001: unique constraint violated
+ primaryKeyViolationCodes = new int[] {1};
+
+ ddlReader = new OracleDdlReader(log, this);
+ ddlBuilder = new OracleBuilder(log, this);
+ }
+
+ /*
+ * Uses an Oracle-specific LOB handler for the SQL template so BLOB/CLOB
+ * values are handled through Spring's OracleLobHandler.
+ */
+ @Override
+ protected void createSqlTemplate() {
+ this.sqlTemplate = new JdbcSqlTemplate(dataSource, settings, new OracleLobHandler());
+ }
+
+ /* Returns the platform name constant (DATABASENAME) identifying this Oracle platform. */
+ public String getName() {
+ return DATABASENAME;
+ }
+
+
+ /* Oracle exposes no catalogs, so none is reported. */
+ public String getDefaultCatalog() {
+ return null;
+ }
+
+ /*
+ * Lazily resolves the default schema from Oracle's CURRENT_SCHEMA session
+ * context and caches it.
+ * NOTE(review): the lazy initialization is not synchronized -- assumed to
+ * run single-threaded during startup; confirm if used concurrently.
+ */
+ public String getDefaultSchema() {
+ if (StringUtils.isBlank(defaultSchema)) {
+ defaultSchema = (String) getSqlTemplate().queryForObject(
+ "SELECT sys_context('USERENV', 'CURRENT_SCHEMA') FROM dual", String.class);
+ }
+ return defaultSchema;
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/package.html b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/package.html
new file mode 100644
index 0000000000..8058e338e9
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/oracle/package.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+ This package contains the platform implementation for the
+ Oracle database.
+
+
+
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlBuilder.java
new file mode 100644
index 0000000000..1f5727ffec
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlBuilder.java
@@ -0,0 +1,223 @@
+package org.jumpmind.db.platform.postgresql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for PostgresSql.
+ */
+public class PostgreSqlBuilder extends AbstractDdlBuilder {
+
+ public PostgreSqlBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+
+ // we need to handle the backslash first otherwise the other
+ // already escaped sequences would be affected
+ addEscapedCharSequence("\\", "\\\\");
+ addEscapedCharSequence("'", "\\'");
+ addEscapedCharSequence("\b", "\\b");
+ addEscapedCharSequence("\f", "\\f");
+ addEscapedCharSequence("\n", "\\n");
+ addEscapedCharSequence("\r", "\\r");
+ addEscapedCharSequence("\t", "\\t");
+ }
+
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ ddl.append("DROP TABLE ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ ddl.append(" CASCADE");
+ printEndOfStatement(ddl);
+
+ Column[] columns = table.getAutoIncrementColumns();
+
+ for (int idx = 0; idx < columns.length; idx++) {
+ dropAutoIncrementSequence(table, columns[idx], ddl);
+ }
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ ddl.append("DROP INDEX ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ public void createTable(Table table, StringBuilder ddl) {
+ for (int idx = 0; idx < table.getColumnCount(); idx++) {
+ Column column = table.getColumn(idx);
+
+ if (column.isAutoIncrement()) {
+ createAutoIncrementSequence(table, column, ddl);
+ }
+ }
+ super.createTable(table, ddl);
+ }
+
+ /*
+ * Creates the auto-increment sequence that is then used in the column.
+ *
+ * @param table The table
+ *
+ * @param column The column
+ */
+ private void createAutoIncrementSequence(Table table, Column column, StringBuilder ddl) {
+ ddl.append("CREATE SEQUENCE ");
+ printIdentifier(getConstraintName(null, table, column.getName(), "seq"), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Creates the auto-increment sequence that is then used in the column.
+ *
+ * @param table The table
+ *
+ * @param column The column
+ */
+ private void dropAutoIncrementSequence(Table table, Column column, StringBuilder ddl) {
+ ddl.append("DROP SEQUENCE ");
+ printIdentifier(getConstraintName(null, table, column.getName(), "seq"), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
+ ddl.append(" DEFAULT nextval('");
+ printIdentifier(getConstraintName(null, table, column.getName(), "seq"), ddl);
+ ddl.append("')");
+ }
+
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ Column[] columns = table.getAutoIncrementColumns();
+
+ if (columns.length == 0) {
+ return null;
+ } else {
+ StringBuffer result = new StringBuffer();
+
+ result.append("SELECT ");
+ for (int idx = 0; idx < columns.length; idx++) {
+ if (idx > 0) {
+ result.append(", ");
+ }
+ result.append("currval('");
+ result.append(getDelimitedIdentifier(getConstraintName(null, table,
+ columns[idx].getName(), "seq")));
+ result.append("') AS ");
+ result.append(getDelimitedIdentifier(columns[idx].getName()));
+ }
+ return result.toString();
+ }
+ }
+
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ AddColumnChange addColumnChange = (AddColumnChange) change;
+
+ // We can only use PostgreSQL-specific SQL if
+ // * the column is not set to NOT NULL (the constraint would be
+ // applied immediately
+ // which will not work if there is already data in the table)
+ // * the column has no default value (it would be applied after
+ // the change which
+ // means that PostgreSQL would behave differently from other
+ // databases where the
+ // default is applied to every column)
+ // * the column is added at the end of the table (PostgreSQL
+ // does not support
+ // insertion of a column)
+ if (!addColumnChange.getNewColumn().isRequired()
+ && (addColumnChange.getNewColumn().getDefaultValue() == null)
+ && (addColumnChange.getNextColumn() == null)) {
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+ } else if (change instanceof RemoveColumnChange) {
+ processChange(currentModel, desiredModel, (RemoveColumnChange) change, ddl);
+ changeIt.remove();
+ }
+ }
+ super.processTableStructureChanges(currentModel, desiredModel, sourceTable, targetTable,
+ changes, ddl);
+ }
+
+ /*
+ * Processes the addition of a column to a table.
+ *
+ * @param currentModel The current database schema
+ *
+ * @param desiredModel The desired database schema
+ *
+ * @param change The change object
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD COLUMN ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ *
+ * @param currentModel The current database schema
+ *
+ * @param desiredModel The desired database schema
+ *
+ * @param change The change object
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP COLUMN ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ if (change.getColumn().isAutoIncrement()) {
+ dropAutoIncrementSequence(change.getChangedTable(), change.getColumn(), ddl);
+ }
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlDdlReader.java
new file mode 100644
index 0000000000..b8126269e3
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlDdlReader.java
@@ -0,0 +1,220 @@
+package org.jumpmind.db.platform.postgresql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a PostgreSql database.
+ */
+public class PostgreSqlDdlReader extends AbstractJdbcDdlReader {
+
+ public PostgreSqlDdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ setDefaultTablePattern(null);
+ }
+
+ @Override
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData,
+ Map values) throws SQLException {
+ Table table = super.readTable(connection, metaData, values);
+
+ if (table != null) {
+ // PostgreSQL also returns unique indics for non-pk auto-increment
+ // columns
+ // which are of the form "[table]_[column]_key"
+ HashMap uniquesByName = new HashMap();
+
+ for (int indexIdx = 0; indexIdx < table.getIndexCount(); indexIdx++) {
+ IIndex index = table.getIndex(indexIdx);
+
+ if (index.isUnique() && (index.getName() != null)) {
+ uniquesByName.put(index.getName(), index);
+ }
+ }
+ for (int columnIdx = 0; columnIdx < table.getColumnCount(); columnIdx++) {
+ Column column = table.getColumn(columnIdx);
+ if (column.isAutoIncrement() && !column.isPrimaryKey()) {
+ String indexName = table.getName() + "_" + column.getName() + "_key";
+
+ if (uniquesByName.containsKey(indexName)) {
+ table.removeIndex((IIndex) uniquesByName.get(indexName));
+ uniquesByName.remove(indexName);
+ }
+ }
+ }
+ }
+ return table;
+ }
+
+ @Override
+ protected Integer overrideJdbcTypeForColumn(Map values) {
+ String typeName = (String) values.get("TYPE_NAME");
+ if (typeName != null && typeName.equalsIgnoreCase("ABSTIME")) {
+ return Types.TIMESTAMP;
+ } else if (typeName != null && typeName.equalsIgnoreCase("OID")) {
+ return Types.BLOB;
+ } else {
+ return super.overrideJdbcTypeForColumn(values);
+ }
+ }
+
+ @Override
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values) throws SQLException {
+ Column column = super.readColumn(metaData, values);
+
+ if (column.getSize() != null) {
+ if (column.getSizeAsInt() <= 0) {
+ column.setSize(null);
+ // PostgreSQL reports BYTEA and TEXT as BINARY(-1) and
+ // VARCHAR(-1) respectively
+ // Since we cannot currently use the Blob/Clob interface with
+ // BYTEA, we instead
+ // map them to LONGVARBINARY/LONGVARCHAR
+ if (column.getTypeCode() == Types.BINARY) {
+ column.setTypeCode(Types.LONGVARBINARY);
+ } else if (column.getTypeCode() == Types.VARCHAR) {
+ column.setTypeCode(Types.LONGVARCHAR);
+ }
+ }
+ // fix issue DDLUTILS-165 as postgresql-8.2-504-jdbc3.jar seems to
+ // return Integer.MAX_VALUE
+ // on columns defined as TEXT.
+ else if (column.getSizeAsInt() == Integer.MAX_VALUE) {
+ column.setSize(null);
+ if (column.getTypeCode() == Types.VARCHAR) {
+ column.setTypeCode(Types.LONGVARCHAR);
+ } else if (column.getTypeCode() == Types.BINARY) {
+ column.setTypeCode(Types.LONGVARBINARY);
+ }
+ }
+ }
+
+ String defaultValue = column.getDefaultValue();
+
+ if ((defaultValue != null) && (defaultValue.length() > 0)) {
+ // If the default value looks like
+ // "nextval('ROUNDTRIP_VALUE_seq'::text)"
+ // then it is an auto-increment column
+ if (defaultValue.startsWith("nextval(")) {
+ column.setAutoIncrement(true);
+ defaultValue = null;
+ } else {
+ // PostgreSQL returns default values in the forms
+ // "-9000000000000000000::bigint" or
+ // "'some value'::character varying" or "'2000-01-01'::date"
+ switch (column.getTypeCode()) {
+ case Types.INTEGER:
+ case Types.BIGINT:
+ case Types.DECIMAL:
+ case Types.NUMERIC:
+ defaultValue = extractUndelimitedDefaultValue(defaultValue);
+ break;
+ case Types.CHAR:
+ case Types.VARCHAR:
+ case Types.LONGVARCHAR:
+ case Types.DATE:
+ case Types.TIME:
+ case Types.TIMESTAMP:
+ defaultValue = extractDelimitedDefaultValue(defaultValue);
+ break;
+ }
+ if (TypeMap.isTextType(column.getTypeCode())) {
+ // We assume escaping via double quote (see also the
+ // backslash_quote setting:
+ // http://www.postgresql.org/docs/7.4/interactive/runtime-config.html#RUNTIME-CONFIG-COMPATIBLE)
+ defaultValue = unescape(defaultValue, "'", "''");
+ }
+ }
+ column.setDefaultValue(defaultValue);
+ }
+ return column;
+ }
+
+ /*
+ * Extractes the default value from a default value spec of the form
+ * "'some value'::character varying" or "'2000-01-01'::date".
+ *
+ * @param defaultValue The default value spec
+ *
+ * @return The default value
+ */
+ private String extractDelimitedDefaultValue(String defaultValue) {
+ if (defaultValue.startsWith("'")) {
+ int valueEnd = defaultValue.indexOf("'::");
+
+ if (valueEnd > 0) {
+ return defaultValue.substring("'".length(), valueEnd);
+ }
+ }
+ return defaultValue;
+ }
+
+ /*
+ * Extractes the default value from a default value spec of the form
+ * "-9000000000000000000::bigint".
+ *
+ * @param defaultValue The default value spec
+ *
+ * @return The default value
+ */
+ private String extractUndelimitedDefaultValue(String defaultValue) {
+ int valueEnd = defaultValue.indexOf("::");
+
+ if (valueEnd > 0) {
+ defaultValue = defaultValue.substring(0, valueEnd);
+ } else {
+ if (defaultValue.startsWith("(") && defaultValue.endsWith(")")) {
+ defaultValue = defaultValue.substring(1, defaultValue.length() - 1);
+ }
+ }
+ return defaultValue;
+ }
+
+ @Override
+ protected boolean isInternalForeignKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk, IIndex index) {
+ // PostgreSQL does not return an index for a foreign key
+ return false;
+ }
+
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) {
+ return table.doesIndexContainOnlyPrimaryKeyColumns(index);
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlPlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlPlatform.java
new file mode 100644
index 0000000000..489b8d4656
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgreSqlPlatform.java
@@ -0,0 +1,224 @@
+package org.jumpmind.db.platform.postgresql;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Array;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.Map;
+
+import javax.sql.DataSource;
+import javax.sql.rowset.serial.SerialBlob;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.BinaryEncoding;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.db.sql.DmlStatement;
+import org.jumpmind.db.sql.DmlStatement.DmlType;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for PostgresSql.
+ */
+public class PostgreSqlPlatform extends AbstractJdbcDatabasePlatform {
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "PostgreSql";
+ /* The standard PostgreSQL jdbc driver. */
+ public static final String JDBC_DRIVER = "org.postgresql.Driver";
+ /* The subprotocol used by the standard PostgreSQL driver. */
+ public static final String JDBC_SUBPROTOCOL = "postgresql";
+
+ /*
+ * Creates a new platform instance.
+ */
+ public PostgreSqlPlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ // Query timeout needs to be zero for postrgres because the jdbc driver does
+ // not support a timeout setting of of other than zero.
+ settings.setQueryTimeout(0);
+
+ // this is the default length though it might be changed when building
+ // PostgreSQL
+ // in file src/include/postgres_ext.h
+ info.setMaxIdentifierLength(31);
+
+ info.addNativeTypeMapping(Types.ARRAY, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.BINARY, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.BIT, "BOOLEAN");
+ info.addNativeTypeMapping(Types.BLOB, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.CLOB, "TEXT", Types.LONGVARCHAR);
+ info.addNativeTypeMapping(Types.DECIMAL, "NUMERIC", Types.NUMERIC);
+ info.addNativeTypeMapping(Types.DISTINCT, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.DOUBLE, "DOUBLE PRECISION");
+ info.addNativeTypeMapping(Types.FLOAT, "DOUBLE PRECISION", Types.DOUBLE);
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.LONGVARBINARY, "BYTEA");
+ info.addNativeTypeMapping(Types.LONGVARCHAR, "TEXT", Types.LONGVARCHAR);
+ info.addNativeTypeMapping(Types.NULL, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.OTHER, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.REF, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.STRUCT, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping(Types.VARBINARY, "BYTEA", Types.LONGVARBINARY);
+ info.addNativeTypeMapping("BOOLEAN", "BOOLEAN", "BIT");
+ info.addNativeTypeMapping("DATALINK", "BYTEA", "LONGVARBINARY");
+
+ info.setDefaultSize(Types.CHAR, 254);
+ info.setDefaultSize(Types.VARCHAR, 254);
+
+ // no support for specifying the size for these types (because they are
+ // mapped
+ // to BYTEA which back-maps to BLOB)
+ info.setHasSize(Types.BINARY, false);
+ info.setHasSize(Types.VARBINARY, false);
+
+ setDelimitedIdentifierModeOn(true);
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(false);
+
+ primaryKeyViolationSqlStates = new String[] {"23505"};
+
+ ddlReader = new PostgreSqlDdlReader(log, this);
+ ddlBuilder = new PostgreSqlBuilder(log, this);
+ }
+
+ public String getName() {
+ return DATABASENAME;
+ }
+
+ public String getDefaultSchema() {
+ if (StringUtils.isBlank(defaultSchema)) {
+ defaultSchema = (String) getSqlTemplate().queryForObject("select current_schema()", String.class);
+ }
+ return defaultSchema;
+ }
+
+ public String getDefaultCatalog() {
+ return null;
+ }
+
+
+ @Override
+ protected Array createArray(Column column, final String value) {
+ if (StringUtils.isNotBlank(value)) {
+
+ String jdbcTypeName = column.getJdbcTypeName();
+ if (jdbcTypeName.startsWith("_")) {
+ jdbcTypeName = jdbcTypeName.substring(1);
+ }
+ int jdbcBaseType = Types.VARCHAR;
+ if (jdbcTypeName.toLowerCase().contains("int")) {
+ jdbcBaseType = Types.INTEGER;
+ }
+
+ final String baseTypeName = jdbcTypeName;
+ final int baseType = jdbcBaseType;
+ return new Array() {
+ public String getBaseTypeName() {
+ return baseTypeName;
+ }
+
+ public void free() throws SQLException {
+ }
+
+ public int getBaseType() {
+ return baseType;
+ }
+
+ public Object getArray() {
+ return null;
+ }
+
+ public Object getArray(Map> map) {
+ return null;
+ }
+
+ public Object getArray(long index, int count) {
+ return null;
+ }
+
+ public Object getArray(long index, int count, Map> map) {
+ return null;
+ }
+
+ public ResultSet getResultSet() {
+ return null;
+ }
+
+ public ResultSet getResultSet(Map> map) {
+ return null;
+ }
+
+ public ResultSet getResultSet(long index, int count) {
+ return null;
+ }
+
+ public ResultSet getResultSet(long index, int count, Map> map) {
+ return null;
+ }
+
+ public String toString() {
+ return value;
+ }
+ };
+ } else {
+ return null;
+ }
+ }
+
+ @Override
+ protected String cleanTextForTextBasedColumns(String text) {
+ return text.replace("\0", "");
+ }
+
+
+ @Override
+ public Object[] getObjectValues(BinaryEncoding encoding, String[] values,
+ Column[] orderedMetaData) {
+
+ Object[] objectValues = super.getObjectValues(encoding, values, orderedMetaData);
+ for (int i = 0; i < orderedMetaData.length; i++) {
+ if (orderedMetaData[i] != null && orderedMetaData[i].getTypeCode() == Types.BLOB
+ && objectValues[i] != null) {
+ try {
+ objectValues[i] = new SerialBlob((byte[]) objectValues[i]);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+ return objectValues;
+ }
+
+ @Override
+ public DmlStatement createDmlStatement(DmlType dmlType, String catalogName, String schemaName,
+ String tableName, Column[] keys, Column[] columns) {
+ return new PostgresDmlStatement(dmlType, catalogName, schemaName, tableName, keys, columns,
+ getPlatformInfo().isDateOverridesToTimestamp(),
+ getPlatformInfo().getIdentifierQuoteString());
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgresDmlStatement.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgresDmlStatement.java
new file mode 100644
index 0000000000..77bf56b0c3
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/PostgresDmlStatement.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to JumpMind Inc under one or more contributor
+ * license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding
+ * copyright ownership. JumpMind Inc licenses this file
+ * to you under the GNU Lesser General Public License (the
+ * "License"); you may not use this file except in compliance
+ * with the License.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, see
+ * .
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.jumpmind.db.platform.postgresql;
+
+import org.apache.commons.lang.ArrayUtils;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.sql.DmlStatement;
+
+public class PostgresDmlStatement extends DmlStatement {
+
+ public PostgresDmlStatement(DmlType type, String catalogName, String schemaName,
+ String tableName, Column[] keys, Column[] columns, boolean isDateOverrideToTimestamp,
+ String identifierQuoteString) {
+ super(type, catalogName, schemaName, tableName, keys, columns, isDateOverrideToTimestamp,
+ identifierQuoteString);
+ }
+
+ @Override
+ public String buildInsertSql(String tableName, Column[] keyColumns, Column[] columns) {
+ if (keyColumns != null && keyColumns.length > 0 && keyColumns[0] != null) {
+ StringBuilder sql = new StringBuilder("insert into ");
+ sql.append(tableName);
+ sql.append("(");
+ int columnCount = appendColumns(sql, columns);
+ sql.append(") (select ");
+ appendColumnQuestions(sql, columnCount);
+ sql.append(" where (select 1 from ");
+ sql.append(tableName);
+ sql.append(" where ");
+ if (keyColumns == null || keyColumns.length == 0) {
+ sql.append("1 != 1");
+ } else {
+ appendColumnEquals(sql, keyColumns, " and ");
+ }
+ sql.append(") is null)");
+ return sql.toString();
+ } else {
+ return super.buildInsertSql(tableName, keys, columns);
+ }
+ }
+
+ @Override
+ public Column[] getMetaData() {
+ if (dmlType == DmlType.INSERT) {
+ return getColumnKeyMetaData();
+ } else {
+ return super.getMetaData();
+ }
+ }
+
+ @Override
+ public String[] getValueArray(String[] columnValues, String[] keyValues) {
+ if (dmlType == DmlType.INSERT) {
+ return (String[]) ArrayUtils.addAll(columnValues, keyValues);
+ } else {
+ return super.getValueArray(columnValues, keyValues);
+ }
+ }
+
+ @Override
+ protected int[] buildTypes(Column[] keys, Column[] columns, boolean isDateOverrideToTimestamp) {
+ if (dmlType == DmlType.INSERT) {
+ int[] columnTypes = buildTypes(columns, isDateOverrideToTimestamp);
+ int[] keyTypes = buildTypes(keys, isDateOverrideToTimestamp);
+ return ArrayUtils.addAll(columnTypes, keyTypes);
+ } else {
+ return super.buildTypes(keys, columns, isDateOverrideToTimestamp);
+ }
+ }
+
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/package.html b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/package.html
new file mode 100644
index 0000000000..b0ecd1688d
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/postgresql/package.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+ This package contains the platform implementation for the
+ PostgreSQL database.
+
+
+
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLiteBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLiteBuilder.java
new file mode 100644
index 0000000000..483c561079
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLiteBuilder.java
@@ -0,0 +1,271 @@
+/*
+ * To change this template, choose Tools | Templates
+ * and open the template in the editor.
+ */
+
+package org.jumpmind.db.platform.sqlite;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.Writer;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.ListIterator;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.ColumnChange;
+import org.jumpmind.db.alter.ColumnDataTypeChange;
+import org.jumpmind.db.alter.ColumnRequiredChange;
+import org.jumpmind.db.alter.ColumnSizeChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.ModelException;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for the SqlLite database.
+ */
+public class SqLiteBuilder extends AbstractDdlBuilder {
+
    public SqLiteBuilder(Log log, IDatabasePlatform platform) {
        super(log, platform);

        // SQLite escapes a single quote inside a string literal by doubling it.
        addEscapedCharSequence("'", "''");
    }
+
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ ddl.append("DROP TABLE IF EXISTS ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ printEndOfStatement(ddl);
+ }
+
    /*
     * Writes the auto-increment clause for a column. In SQLite, AUTOINCREMENT
     * is only valid as part of an INTEGER PRIMARY KEY column definition.
     * NOTE(review): the guard looks inverted -- it emits "PRIMARY KEY
     * AUTOINCREMENT" only when the column is NOT flagged as a primary key.
     * Presumably the base builder writes the PRIMARY KEY clause itself for
     * declared PK columns (which would make this avoid a duplicate); confirm
     * against AbstractDdlBuilder before changing.
     */
    protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBuilder ddl) {
        if (!column.isPrimaryKey()) {
            ddl.append("PRIMARY KEY AUTOINCREMENT");
        }
    }
+
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ return "CALL IDENTITY()";
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ ddl.append("DROP INDEX IF EXISTS ");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
    /*
     * Applies table structure changes that SQLite can handle with ALTER
     * statements, removing each handled change from the collection. Changes
     * that remain are left for the generic (rebuild-based) handling.
     * Processing order is significant: adds, then removes, then column
     * alterations; a primary-key column removal aborts all handling here.
     */
    @Override
    @SuppressWarnings("unchecked")
    protected void processTableStructureChanges(Database currentModel, Database desiredModel,
            Collection changes, StringBuilder ddl) {
        // Only drop columns that are not part of a primary key
        for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
            TableChange change = (TableChange) changeIt.next();

            if ((change instanceof RemoveColumnChange)
                    && ((RemoveColumnChange) change).getColumn().isPrimaryKey()) {
                return;
            }
        }

        // in order to utilize the ALTER TABLE ADD COLUMN BEFORE statement
        // we have to apply the add column changes in the correct order
        // thus we first gather all add column changes and then execute them
        // Since we get them in target table column order, we can simply
        // iterate backwards
        ArrayList addColumnChanges = new ArrayList();

        for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
            TableChange change = (TableChange) changeIt.next();
            if (change instanceof AddColumnChange) {
                addColumnChanges.add(change);
                changeIt.remove();
            }
        }

        for (ListIterator changeIt = addColumnChanges.listIterator(addColumnChanges.size()); changeIt
                .hasPrevious();) {
            AddColumnChange addColumnChange = (AddColumnChange) changeIt.previous();
            processChange(currentModel, desiredModel, addColumnChange, ddl);
            changeIt.remove();
        }

        for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
            TableChange change = (TableChange) changeIt.next();
            if (change instanceof RemoveColumnChange) {
                RemoveColumnChange removeColumnChange = (RemoveColumnChange) change;
                processChange(currentModel, desiredModel, removeColumnChange, ddl);
                changeIt.remove();
            }
        }

        for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
            TableChange change = (TableChange) changeIt.next();
            if (change instanceof ColumnChange) {
                // Skip alterations that are no-ops for SQLite's loose typing:
                // equivalent type pairs or unchanged/unspecified sizes.
                boolean needsAlter = true;
                if (change instanceof ColumnDataTypeChange) {
                    ColumnDataTypeChange dataTypeChange = (ColumnDataTypeChange) change;
                    if (dataTypeChange.getChangedColumn().getTypeCode() == Types.DECIMAL
                            && dataTypeChange.getNewTypeCode() == Types.NUMERIC) {
                        needsAlter = false;
                    }
                    if (dataTypeChange.getChangedColumn().getTypeCode() == Types.SMALLINT
                            && dataTypeChange.getNewTypeCode() == Types.TINYINT) {
                        needsAlter = false;
                    }
                    if (dataTypeChange.getChangedColumn().getTypeCode() == Types.VARCHAR
                            && dataTypeChange.getNewTypeCode() == Types.LONGVARCHAR) {
                        needsAlter = false;
                    }
                }
                if (change instanceof ColumnSizeChange) {
                    ColumnSizeChange sizeChange = (ColumnSizeChange) change;
                    if (sizeChange.getNewScale() == 0 && sizeChange.getNewSize() == 0) {
                        needsAlter = false;
                    } else if (sizeChange.getNewSize() == sizeChange.getChangedColumn()
                            .getSizeAsInt()
                            && sizeChange.getNewScale() == sizeChange.getChangedColumn().getScale()) {
                        needsAlter = false;
                    }
                }
                if (needsAlter) {
                    processAlterColumn(currentModel, (ColumnChange) change, ddl);
                }
                changeIt.remove();
            }
        }

    }
+
+ protected void processAlterColumn(Database currentModel, ColumnChange columnChange,
+ StringBuilder ddl) {
+ columnChange.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(columnChange.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ALTER COLUMN ");
+ if (columnChange instanceof ColumnRequiredChange) {
+ ColumnRequiredChange columnRequiredChange = (ColumnRequiredChange) columnChange;
+ printlnIdentifier(getColumnName(columnChange.getChangedColumn()), ddl);
+ printIndent(ddl);
+ if (columnRequiredChange.getChangedColumn().isRequired()) {
+ ddl.append(" SET NOT NULL ");
+ } else {
+ ddl.append(" SET NULL ");
+ }
+ } else {
+ writeColumn(columnChange.getChangedTable(), columnChange.getChangedColumn(), ddl);
+ }
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Processes the addition of a column to a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD COLUMN ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ if (change.getNextColumn() != null) {
+ ddl.append(" BEFORE ");
+ printIdentifier(getColumnName(change.getNextColumn()), ddl);
+ }
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP COLUMN ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ @Override
+ protected void writeColumnDefaultValueStmt(Table table, Column column, StringBuilder ddl) {
+ Object parsedDefault = column.getParsedDefaultValue();
+
+ if (parsedDefault != null) {
+ if (!platform.getPlatformInfo().isDefaultValuesForLongTypesSupported()
+ && ((column.getTypeCode() == Types.LONGVARBINARY) || (column.getTypeCode() == Types.LONGVARCHAR))) {
+ throw new ModelException(
+ "The platform does not support default values for LONGVARCHAR or LONGVARBINARY columns");
+ }
+ // we write empty default value strings only if the type is not a
+ // numeric or date/time type
+ if (isValidDefaultValue(column.getDefaultValue(), column.getTypeCode())) {
+ ddl.append(" DEFAULT ");
+ writeColumnDefaultValue(table, column, ddl);
+ }
+ } else if (platform.getPlatformInfo().isDefaultValueUsedForIdentitySpec()
+ && column.isAutoIncrement()) {
+ ddl.append(" DEFAULT ");
+ writeColumnDefaultValue(table, column, ddl);
+ } else if (!StringUtils.isBlank(column.getDefaultValue())) {
+ ddl.append(" DEFAULT ");
+ writeColumnDefaultValue(table, column, ddl);
+ }
+ }
+
+ @Override
+ protected void printDefaultValue(Object defaultValue, int typeCode, StringBuilder ddl) {
+ if (defaultValue != null) {
+ String defaultValueStr = defaultValue.toString();
+ boolean shouldUseQuotes = !TypeMap.isNumericType(typeCode)
+ && !defaultValueStr.startsWith("TO_DATE(")
+ && !defaultValue.equals("CURRENT_TIMESTAMP")
+ && !defaultValue.equals("CURRENT_TIME") && !defaultValue.equals("CURRENT_DATE");
+ ;
+
+ if (shouldUseQuotes) {
+ // characters are only escaped when within a string literal
+ ddl.append(platform.getPlatformInfo().getValueQuoteToken());
+ ddl.append(escapeStringValue(defaultValueStr));
+ ddl.append(platform.getPlatformInfo().getValueQuoteToken());
+ } else {
+ ddl.append(defaultValueStr);
+ }
+ }
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLiteDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLiteDdlReader.java
new file mode 100644
index 0000000000..ecc3a81cc8
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLiteDdlReader.java
@@ -0,0 +1,94 @@
+package org.jumpmind.db.platform.sqlite;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.db.platform.MetaDataColumnDescriptor;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a SQLite database.
+ */
+public class SqLiteDdlReader extends AbstractJdbcDdlReader {
+
+ public SqLiteDdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ }
+
+ protected Collection readForeignKeys(DatabaseMetaDataWrapper metaData, String tableName) throws SQLException {
+ // TODO
+ return new ArrayList();
+ }
+
+ protected Collection readIndices(DatabaseMetaDataWrapper metaData, String tableName) throws SQLException {
+ // TODO
+ return new ArrayList();
+ }
+
+ /* NOTE: the methods below were adapted from the H2 DDL reader; verify they match SQLite's JDBC metadata behavior before relying on them. */
+
+ @Override
+ @SuppressWarnings("unchecked")
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values) throws SQLException {
+ Column column = super.readColumn(metaData, values);
+ if (values.get("CHARACTER_MAXIMUM_LENGTH") != null) {
+ column.setSize(values.get("CHARACTER_MAXIMUM_LENGTH").toString());
+ }
+ if (values.get("COLUMN_DEFAULT") != null) {
+ column.setDefaultValue(values.get("COLUMN_DEFAULT").toString());
+ }
+ if (values.get("NUMERIC_SCALE") != null) {
+ column.setScale((Integer) values.get("NUMERIC_SCALE"));
+ }
+ if (TypeMap.isTextType(column.getTypeCode()) && (column.getDefaultValue() != null)) {
+ column.setDefaultValue(unescape(column.getDefaultValue(), "'", "''"));
+ }
+ return column;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ protected List initColumnsForColumn() {
+ List result = new ArrayList();
+ result.add(new MetaDataColumnDescriptor("COLUMN_DEF", 12));
+ result.add(new MetaDataColumnDescriptor("COLUMN_DEFAULT", 12));
+ result.add(new MetaDataColumnDescriptor("TABLE_NAME", 12));
+ result.add(new MetaDataColumnDescriptor("COLUMN_NAME", 12));
+ result.add(new MetaDataColumnDescriptor("DATA_TYPE", 4, new Integer(1111)));
+ result.add(new MetaDataColumnDescriptor("NUM_PREC_RADIX", 4, new Integer(10)));
+ result.add(new MetaDataColumnDescriptor("DECIMAL_DIGITS", 4, new Integer(0)));
+ result.add(new MetaDataColumnDescriptor("NUMERIC_SCALE", 4, new Integer(0)));
+ result.add(new MetaDataColumnDescriptor("COLUMN_SIZE", 12));
+ result.add(new MetaDataColumnDescriptor("CHARACTER_MAXIMUM_LENGTH", 12));
+ result.add(new MetaDataColumnDescriptor("IS_NULLABLE", 12, "YES"));
+ result.add(new MetaDataColumnDescriptor("REMARKS", 12));
+ return result;
+ }
+
+ @Override
+ protected boolean isInternalForeignKeyIndex(Connection connection, DatabaseMetaDataWrapper metaData, Table table, ForeignKey fk,
+ IIndex index) {
+ String name = index.getName();
+ return name != null && name.startsWith("CONSTRAINT_INDEX_");
+ }
+
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection, DatabaseMetaDataWrapper metaData, Table table, IIndex index) {
+ String name = index.getName();
+ return name != null && name.startsWith("PRIMARY_KEY_");
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLitePlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLitePlatform.java
new file mode 100644
index 0000000000..1e03f977e3
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sqlite/SqLitePlatform.java
@@ -0,0 +1,96 @@
+package org.jumpmind.db.platform.sqlite;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for the SQLite database.
+ */
+public class SqLitePlatform extends AbstractJdbcDatabasePlatform implements IDatabasePlatform {
+
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "SQLite3";
+
+ /* The standard SQLite JDBC driver. */
+ public static final String JDBC_DRIVER = "org.sqlite.JDBC";
+
+ /*
+ * Creates a new instance of the SQLite platform.
+ */
+ public SqLitePlatform(DataSource dataSource,DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.setNonPKIdentityColumnsSupported(false);
+ info.setIdentityOverrideAllowed(false);
+ info.setSystemForeignKeyIndicesAlwaysNonUnique(true);
+ info.setNullAsDefaultValueRequired(false);
+ info.addNativeTypeMapping(Types.ARRAY, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.DISTINCT, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.NULL, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.REF, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.STRUCT, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.DATALINK, "BINARY", Types.BINARY);
+
+ info.addNativeTypeMapping(Types.BIT, "BOOLEAN", Types.BIT);
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.TINYINT);
+ info.addNativeTypeMapping(Types.SMALLINT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping(Types.BINARY, "BINARY", Types.BINARY);
+ info.addNativeTypeMapping(Types.BLOB, "BLOB", Types.BLOB);
+ info.addNativeTypeMapping(Types.CLOB, "CLOB", Types.CLOB);
+ info.addNativeTypeMapping(Types.FLOAT, "DOUBLE", Types.DOUBLE);
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "OTHER");
+
+ info.setDefaultSize(Types.CHAR, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.VARCHAR, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.BINARY, Integer.MAX_VALUE);
+ info.setDefaultSize(Types.VARBINARY, Integer.MAX_VALUE);
+
+ info.setNonBlankCharColumnSpacePadded(false);
+ info.setBlankCharColumnSpacePadded(false);
+ info.setCharColumnSpaceTrimmed(true);
+ info.setEmptyStringNulled(false);
+
+
+ ddlReader = new SqLiteDdlReader(log, this);
+ ddlBuilder = new SqLiteBuilder(log, this);
+ }
+
+ public String getName() {
+ return DATABASENAME;
+ }
+
+
+ public String getDefaultCatalog() {
+ return null;
+ }
+
+ public String getDefaultSchema() {
+ return null;
+ }
+
+}
\ No newline at end of file
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybaseBuilder.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybaseBuilder.java
new file mode 100644
index 0000000000..b00f022ba7
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybaseBuilder.java
@@ -0,0 +1,510 @@
+package org.jumpmind.db.platform.sybase;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.alter.AddColumnChange;
+import org.jumpmind.db.alter.AddPrimaryKeyChange;
+import org.jumpmind.db.alter.ColumnAutoIncrementChange;
+import org.jumpmind.db.alter.ColumnChange;
+import org.jumpmind.db.alter.ColumnDefaultValueChange;
+import org.jumpmind.db.alter.IModelChange;
+import org.jumpmind.db.alter.PrimaryKeyChange;
+import org.jumpmind.db.alter.RemoveColumnChange;
+import org.jumpmind.db.alter.RemovePrimaryKeyChange;
+import org.jumpmind.db.alter.TableChange;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.Database;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.platform.AbstractDdlBuilder;
+import org.jumpmind.db.platform.PlatformUtils;
+import org.jumpmind.log.Log;
+
+/*
+ * The SQL Builder for Sybase.
+ */
+public class SybaseBuilder extends AbstractDdlBuilder {
+
+ public SybaseBuilder(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ addEscapedCharSequence("'", "''");
+ }
+
+ @Override
+ public void createTable(Table table, StringBuilder ddl) {
+ writeQuotationOnStatement(ddl);
+ super.createTable(table, ddl);
+ }
+
+ @Override
+ protected void writeColumn(Table table, Column column, StringBuilder ddl) {
+ printIdentifier(getColumnName(column), ddl);
+ ddl.append(" ");
+ ddl.append(getSqlType(column));
+ writeColumnDefaultValueStmt(table, column, ddl);
+ // Sybase does not like NULL/NOT NULL and IDENTITY together
+ if (column.isAutoIncrement()) {
+ ddl.append(" ");
+ writeColumnAutoIncrementStmt(table, column, ddl);
+ } else {
+ ddl.append(" ");
+ if (column.isRequired()) {
+ writeColumnNotNullableStmt(ddl);
+ } else {
+ // we'll write a NULL for all columns that are not required
+ writeColumnNullableStmt(ddl);
+ }
+ }
+ }
+
+ @Override
+ protected String getNativeDefaultValue(Column column) {
+ if ((column.getTypeCode() == Types.BIT)
+ || (PlatformUtils.supportsJava14JdbcTypes() && (column.getTypeCode() == PlatformUtils
+ .determineBooleanTypeCode()))) {
+ return getDefaultValueHelper().convert(column.getDefaultValue(), column.getTypeCode(),
+ Types.SMALLINT).toString();
+ } else {
+ return super.getNativeDefaultValue(column);
+ }
+ }
+
+ @Override
+ public void dropTable(Table table, StringBuilder ddl) {
+ writeQuotationOnStatement(ddl);
+ ddl.append("IF EXISTS (SELECT 1 FROM sysobjects WHERE type = 'U' AND name = ");
+ printAlwaysSingleQuotedIdentifier(getTableName(table.getName()), ddl);
+ println(")", ddl);
+ println("BEGIN", ddl);
+ printIndent(ddl);
+ ddl.append("DROP TABLE ");
+ printlnIdentifier(getTableName(table.getName()), ddl);
+ ddl.append("END");
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ protected void writeExternalForeignKeyDropStmt(Table table, ForeignKey foreignKey,
+ StringBuilder ddl) {
+ String constraintName = getForeignKeyName(table, foreignKey);
+
+ ddl.append("IF EXISTS (SELECT 1 FROM sysobjects WHERE type = 'RI' AND name = ");
+ printAlwaysSingleQuotedIdentifier(constraintName, ddl);
+ println(")", ddl);
+ printIndent(ddl);
+ ddl.append("ALTER TABLE ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ ddl.append(" DROP CONSTRAINT ");
+ printIdentifier(constraintName, ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ public void writeExternalIndexDropStmt(Table table, IIndex index, StringBuilder ddl) {
+ ddl.append("DROP INDEX ");
+ printIdentifier(getTableName(table.getName()), ddl);
+ ddl.append(".");
+ printIdentifier(getIndexName(index), ddl);
+ printEndOfStatement(ddl);
+ }
+
+ @Override
+ public void dropExternalForeignKeys(Table table, StringBuilder ddl) {
+ writeQuotationOnStatement(ddl);
+ super.dropExternalForeignKeys(table, ddl);
+ }
+
+ @Override
+ public String getSelectLastIdentityValues(Table table) {
+ return "SELECT @@IDENTITY";
+ }
+
+ /*
+ * Returns the SQL to enable identity override mode.
+ *
+ * @param table The table to enable the mode for
+ *
+ * @return The SQL
+ */
+ protected String getEnableIdentityOverrideSql(Table table) {
+ StringBuffer result = new StringBuffer();
+
+ result.append("SET IDENTITY_INSERT ");
+ result.append(getDelimitedIdentifier(getTableName(table.getName())));
+ result.append(" ON");
+
+ return result.toString();
+ }
+
+ /*
+ * Returns the SQL to disable identity override mode.
+ *
+ * @param table The table to disable the mode for
+ *
+ * @return The SQL
+ */
+ protected String getDisableIdentityOverrideSql(Table table) {
+ StringBuffer result = new StringBuffer();
+
+ result.append("SET IDENTITY_INSERT ");
+ result.append(getDelimitedIdentifier(getTableName(table.getName())));
+ result.append(" OFF");
+
+ return result.toString();
+ }
+
+ /*
+ * Returns the statement that turns on the ability to write delimited
+ * identifiers.
+ *
+ * @return The quotation-on statement
+ */
+ protected String getQuotationOnStatement() {
+ if (platform.isDelimitedIdentifierModeOn()) {
+ return "SET quoted_identifier on";
+ } else {
+ return "";
+ }
+ }
+
+ /*
+ * Writes the statement that turns on the ability to write delimited
+ * identifiers.
+ */
+ private void writeQuotationOnStatement(StringBuilder ddl) {
+ ddl.append(getQuotationOnStatement());
+ printEndOfStatement(ddl);
+ }
+
+ /*
+ * Prints the given identifier with enforced single quotes around it
+ * regardless of whether delimited identifiers are turned on or not.
+ *
+ * @param identifier The identifier
+ */
+ private void printAlwaysSingleQuotedIdentifier(String identifier, StringBuilder ddl) {
+ ddl.append("'");
+ ddl.append(identifier);
+ ddl.append("'");
+ }
+
+ @Override
+ public void writeCopyDataStatement(Table sourceTable, Table targetTable, StringBuilder ddl) {
+ boolean hasIdentity = targetTable.getAutoIncrementColumns().length > 0;
+
+ if (hasIdentity) {
+ ddl.append("SET IDENTITY_INSERT ");
+ printIdentifier(getTableName(targetTable.getName()), ddl);
+ ddl.append(" ON");
+ printEndOfStatement(ddl);
+ }
+ super.writeCopyDataStatement(sourceTable, targetTable, ddl);
+ if (hasIdentity) {
+ ddl.append("SET IDENTITY_INSERT ");
+ printIdentifier(getTableName(targetTable.getName()), ddl);
+ ddl.append(" OFF");
+ printEndOfStatement(ddl);
+ }
+ }
+
+ @Override
+ protected void writeCastExpression(Column sourceColumn, Column targetColumn, StringBuilder ddl) {
+ String sourceNativeType = getBareNativeType(sourceColumn);
+ String targetNativeType = getBareNativeType(targetColumn);
+
+ if (sourceNativeType.equals(targetNativeType)) {
+ printIdentifier(getColumnName(sourceColumn), ddl);
+ } else {
+ ddl.append("CONVERT(");
+ ddl.append(getNativeType(targetColumn));
+ ddl.append(",");
+ printIdentifier(getColumnName(sourceColumn), ddl);
+ ddl.append(")");
+ }
+ }
+
+ @Override
+ protected void processChanges(Database currentModel, Database desiredModel,
+ List changes, StringBuilder ddl) {
+ if (!changes.isEmpty()) {
+ writeQuotationOnStatement(ddl);
+ }
+ super.processChanges(currentModel, desiredModel, changes, ddl);
+ }
+
+ @Override
+ protected void processTableStructureChanges(Database currentModel, Database desiredModel,
+ Table sourceTable, Table targetTable, List changes, StringBuilder ddl) {
+ // First we drop primary keys as necessary
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = (TableChange) changeIt.next();
+
+ if (change instanceof RemovePrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (RemovePrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof PrimaryKeyChange) {
+ PrimaryKeyChange pkChange = (PrimaryKeyChange) change;
+ RemovePrimaryKeyChange removePkChange = new RemovePrimaryKeyChange(
+ pkChange.getChangedTable(), pkChange.getOldPrimaryKeyColumns());
+
+ processChange(currentModel, desiredModel, removePkChange, ddl);
+ }
+ }
+
+ HashMap columnChanges = new HashMap();
+
+ // Next we add/remove columns
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = (TableChange) changeIt.next();
+
+ if (change instanceof AddColumnChange) {
+ AddColumnChange addColumnChange = (AddColumnChange) change;
+
+ // Sybase can only add not insert columns
+ if (addColumnChange.isAtEnd()) {
+ processChange(currentModel, desiredModel, addColumnChange, ddl);
+ changeIt.remove();
+ }
+ } else if (change instanceof RemoveColumnChange) {
+ processChange(currentModel, desiredModel, (RemoveColumnChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof ColumnAutoIncrementChange) {
+ // Sybase has no way of adding or removing an IDENTITY
+ // constraint
+ // Thus we have to rebuild the table anyway and can ignore all
+ // the other
+ // column changes
+ columnChanges = null;
+ } else if ((change instanceof ColumnChange) && (columnChanges != null)) {
+ // we gather all changed columns because we can use the ALTER
+ // TABLE ALTER COLUMN
+ // statement for them
+ ColumnChange columnChange = (ColumnChange) change;
+ ArrayList changesPerColumn = (ArrayList) columnChanges.get(columnChange
+ .getChangedColumn());
+
+ if (changesPerColumn == null) {
+ changesPerColumn = new ArrayList();
+ columnChanges.put(columnChange.getChangedColumn(), changesPerColumn);
+ }
+ changesPerColumn.add(change);
+ }
+ }
+ if (columnChanges != null) {
+ for (Iterator changesPerColumnIt = columnChanges.entrySet().iterator(); changesPerColumnIt
+ .hasNext();) {
+ Map.Entry entry = (Map.Entry) changesPerColumnIt.next();
+ Column sourceColumn = (Column) entry.getKey();
+ ArrayList changesPerColumn = (ArrayList) entry.getValue();
+
+ // Sybase does not like us to use the ALTER TABLE ALTER
+ // statement if we don't actually
+ // change the datatype or the required constraint but only the
+ // default value
+ // Thus, if we only have to change the default, we use a
+ // different handler
+ if ((changesPerColumn.size() == 1)
+ && (changesPerColumn.get(0) instanceof ColumnDefaultValueChange)) {
+ processChange(currentModel, desiredModel,
+ (ColumnDefaultValueChange) changesPerColumn.get(0), ddl);
+ } else {
+ Column targetColumn = targetTable.findColumn(sourceColumn.getName(),
+ platform.isDelimitedIdentifierModeOn());
+
+ processColumnChange(sourceTable, targetTable, sourceColumn, targetColumn, ddl);
+ }
+ for (Iterator changeIt = changesPerColumn.iterator(); changeIt.hasNext();) {
+ ((ColumnChange) changeIt.next()).apply(currentModel,
+ platform.isDelimitedIdentifierModeOn());
+ }
+ }
+ }
+ // Finally we add primary keys
+ for (Iterator changeIt = changes.iterator(); changeIt.hasNext();) {
+ TableChange change = (TableChange) changeIt.next();
+
+ if (change instanceof AddPrimaryKeyChange) {
+ processChange(currentModel, desiredModel, (AddPrimaryKeyChange) change, ddl);
+ changeIt.remove();
+ } else if (change instanceof PrimaryKeyChange) {
+ PrimaryKeyChange pkChange = (PrimaryKeyChange) change;
+ AddPrimaryKeyChange addPkChange = new AddPrimaryKeyChange(
+ pkChange.getChangedTable(), pkChange.getNewPrimaryKeyColumns());
+
+ processChange(currentModel, desiredModel, addPkChange, ddl);
+ changeIt.remove();
+ }
+ }
+ }
+
+ /*
+ * Processes the addition of a column to a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ AddColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("ADD ");
+ writeColumn(change.getChangedTable(), change.getNewColumn(), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a column from a table.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemoveColumnChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("DROP ");
+ printIdentifier(getColumnName(change.getColumn()), ddl);
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the removal of a primary key from a table.
+ *
+ * @param currentModel The current database schema
+ *
+ * @param desiredModel The desired database schema
+ *
+ * @param change The change object
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ RemovePrimaryKeyChange change, StringBuilder ddl) {
+ // TODO: this would be easier when named primary keys are supported
+ // because then we can use ALTER TABLE DROP
+ String tableName = getTableName(change.getChangedTable().getName());
+ String tableNameVar = "tn" + createUniqueIdentifier();
+ String constraintNameVar = "cn" + createUniqueIdentifier();
+
+ println("BEGIN", ddl);
+ println(" DECLARE @" + tableNameVar + " nvarchar(60), @" + constraintNameVar
+ + " nvarchar(60)", ddl);
+ println(" WHILE EXISTS(SELECT sysindexes.name", ddl);
+ println(" FROM sysindexes, sysobjects", ddl);
+ ddl.append(" WHERE sysobjects.name = ");
+ printAlwaysSingleQuotedIdentifier(tableName, ddl);
+ println(" AND sysobjects.id = sysindexes.id AND (sysindexes.status & 2048) > 0)", ddl);
+ println(" BEGIN", ddl);
+ println(" SELECT @" + tableNameVar + " = sysobjects.name, @" + constraintNameVar
+ + " = sysindexes.name", ddl);
+ println(" FROM sysindexes, sysobjects", ddl);
+ ddl.append(" WHERE sysobjects.name = ");
+ printAlwaysSingleQuotedIdentifier(tableName, ddl);
+ ddl.append(" AND sysobjects.id = sysindexes.id AND (sysindexes.status & 2048) > 0");
+ println(" EXEC ('ALTER TABLE '+@" + tableNameVar + "+' DROP CONSTRAINT '+@"
+ + constraintNameVar + ")", ddl);
+ println(" END", ddl);
+ ddl.append("END");
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes the change of the default value of a column. Note that this
+ * method is only used if it is the only change to that column.
+ */
+ protected void processChange(Database currentModel, Database desiredModel,
+ ColumnDefaultValueChange change, StringBuilder ddl) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(change.getChangedTable().getName()), ddl);
+ printIndent(ddl);
+ ddl.append("REPLACE ");
+ printIdentifier(getColumnName(change.getChangedColumn()), ddl);
+
+ Table curTable = currentModel.findTable(change.getChangedTable().getName(),
+ platform.isDelimitedIdentifierModeOn());
+ Column curColumn = curTable.findColumn(change.getChangedColumn().getName(),
+ platform.isDelimitedIdentifierModeOn());
+
+ ddl.append(" DEFAULT ");
+ if (isValidDefaultValue(change.getNewDefaultValue(), curColumn.getTypeCode())) {
+ printDefaultValue(change.getNewDefaultValue(), curColumn.getTypeCode(), ddl);
+ } else {
+ ddl.append("NULL");
+ }
+ printEndOfStatement(ddl);
+ change.apply(currentModel, platform.isDelimitedIdentifierModeOn());
+ }
+
+ /*
+ * Processes a change to a column.
+ */
+ protected void processColumnChange(Table sourceTable, Table targetTable, Column sourceColumn,
+ Column targetColumn, StringBuilder ddl) {
+ Object oldParsedDefault = sourceColumn.getParsedDefaultValue();
+ Object newParsedDefault = targetColumn.getParsedDefaultValue();
+ String newDefault = targetColumn.getDefaultValue();
+ boolean defaultChanges = ((oldParsedDefault == null) && (newParsedDefault != null))
+ || ((oldParsedDefault != null) && !oldParsedDefault.equals(newParsedDefault));
+
+ // Sybase does not like it if there is a default spec in the ALTER TABLE
+ // ALTER
+ // statement; thus we have to change the default afterwards
+ if (newDefault != null) {
+ targetColumn.setDefaultValue(null);
+ }
+ if (defaultChanges) {
+ // Remove the existing default first, since leaving it in
+ // place can cause problems when the column's datatype is
+ // changed by the subsequent ALTER statement.
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(sourceTable.getName()), ddl);
+ printIndent(ddl);
+ ddl.append("REPLACE ");
+ printIdentifier(getColumnName(sourceColumn), ddl);
+ ddl.append(" DEFAULT NULL");
+ printEndOfStatement(ddl);
+ }
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(sourceTable.getName()), ddl);
+ printIndent(ddl);
+ ddl.append("MODIFY ");
+ writeColumn(sourceTable, targetColumn, ddl);
+ printEndOfStatement(ddl);
+ if (defaultChanges) {
+ ddl.append("ALTER TABLE ");
+ printlnIdentifier(getTableName(sourceTable.getName()), ddl);
+ printIndent(ddl);
+ ddl.append("REPLACE ");
+ printIdentifier(getColumnName(sourceColumn), ddl);
+ if (newDefault != null) {
+ writeColumnDefaultValueStmt(sourceTable, targetColumn, ddl);
+ } else {
+ ddl.append(" DEFAULT NULL");
+ }
+ printEndOfStatement(ddl);
+ }
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybaseDdlReader.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybaseDdlReader.java
new file mode 100644
index 0000000000..dde6729e25
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybaseDdlReader.java
@@ -0,0 +1,259 @@
+package org.jumpmind.db.platform.sybase;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.oro.text.regex.MalformedPatternException;
+import org.apache.oro.text.regex.Pattern;
+import org.apache.oro.text.regex.PatternCompiler;
+import org.apache.oro.text.regex.PatternMatcher;
+import org.apache.oro.text.regex.Perl5Compiler;
+import org.apache.oro.text.regex.Perl5Matcher;
+import org.jumpmind.db.DdlException;
+import org.jumpmind.db.IDatabasePlatform;
+import org.jumpmind.db.model.Column;
+import org.jumpmind.db.model.ForeignKey;
+import org.jumpmind.db.model.IIndex;
+import org.jumpmind.db.model.Reference;
+import org.jumpmind.db.model.Table;
+import org.jumpmind.db.model.TypeMap;
+import org.jumpmind.db.platform.AbstractJdbcDdlReader;
+import org.jumpmind.db.platform.DatabaseMetaDataWrapper;
+import org.jumpmind.log.Log;
+
+/*
+ * Reads a database model from a Sybase database.
+ */
+public class SybaseDdlReader extends AbstractJdbcDdlReader {
+ /* The regular expression pattern for the ISO dates. */
+ private Pattern _isoDatePattern;
+
+ /* The regular expression pattern for the ISO times. */
+ private Pattern _isoTimePattern;
+
+ public SybaseDdlReader(Log log, IDatabasePlatform platform) {
+ super(log, platform);
+ setDefaultCatalogPattern(null);
+ setDefaultSchemaPattern(null);
+ setDefaultTablePattern("%");
+
+ PatternCompiler compiler = new Perl5Compiler();
+
+ try {
+ _isoDatePattern = compiler.compile("'(\\d{4}\\-\\d{2}\\-\\d{2})'");
+ _isoTimePattern = compiler.compile("'(\\d{2}:\\d{2}:\\d{2})'");
+ } catch (MalformedPatternException ex) {
+ throw new DdlException(ex);
+ }
+ }
+
+ @Override
+ protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData, Map values)
+ throws SQLException {
+ Table table = super.readTable(connection, metaData, values);
+
+ if (table != null) {
+ // Sybase does not return the auto-increment status via the database
+ // metadata
+ determineAutoIncrementFromResultSetMetaData(connection, table, table.getColumns());
+ }
+ return table;
+ }
+
+ @Override
+ protected Integer overrideJdbcTypeForColumn(Map values) {
+ String typeName = (String) values.get("TYPE_NAME");
+ if (typeName != null && typeName.toUpperCase().startsWith("TEXT")) {
+ return Types.LONGVARCHAR;
+ } else {
+ return super.overrideJdbcTypeForColumn(values);
+ }
+ }
+
+ @Override
+ protected Column readColumn(DatabaseMetaDataWrapper metaData, Map values) throws SQLException {
+ Column column = super.readColumn(metaData, values);
+
+ if ((column.getTypeCode() == Types.DECIMAL) && (column.getSizeAsInt() == 19)
+ && (column.getScale() == 0)) {
+ // Back-mapping to BIGINT
+ column.setTypeCode(Types.BIGINT);
+ } else if (column.getDefaultValue() != null) {
+ if (column.getTypeCode() == Types.TIMESTAMP) {
+ // Sybase maintains the default values for DATE/TIME jdbc types,
+ // so we have to
+ // migrate the default value to TIMESTAMP
+ PatternMatcher matcher = new Perl5Matcher();
+ Timestamp timestamp = null;
+
+ if (matcher.matches(column.getDefaultValue(), _isoDatePattern)) {
+ timestamp = new Timestamp(Date.valueOf(matcher.getMatch().group(1)).getTime());
+ } else if (matcher.matches(column.getDefaultValue(), _isoTimePattern)) {
+ timestamp = new Timestamp(Time.valueOf(matcher.getMatch().group(1)).getTime());
+ }
+ if (timestamp != null) {
+ column.setDefaultValue(timestamp.toString());
+ }
+ } else if (TypeMap.isTextType(column.getTypeCode())) {
+ column.setDefaultValue(unescape(column.getDefaultValue(), "'", "''"));
+ }
+ }
+ return column;
+ }
+
+ @Override
+ protected void readIndex(DatabaseMetaDataWrapper metaData, Map values, Map knownIndices)
+ throws SQLException {
+ if (getPlatform().isDelimitedIdentifierModeOn()) {
+ String indexName = (String) values.get("INDEX_NAME");
+
+ // Sometimes, Sybase keeps the delimiter quotes around the index
+ // names
+ // when returning them in the metadata, so we strip them
+ if (indexName != null) {
+ String delimiter = getPlatformInfo().getDelimiterToken();
+
+ if ((indexName != null) && indexName.startsWith(delimiter)
+ && indexName.endsWith(delimiter)) {
+ indexName = indexName.substring(delimiter.length(), indexName.length()
+ - delimiter.length());
+ values.put("INDEX_NAME", indexName);
+ }
+ }
+ }
+ super.readIndex(metaData, values, knownIndices);
+ }
+
+ @Override
+ protected Collection readForeignKeys(Connection connection, DatabaseMetaDataWrapper metaData,
+ String tableName) throws SQLException {
+ // Sybase (or jConnect) does not return the foreign key names, thus we
+ // have to
+ // read the foreign keys manually from the system tables
+ StringBuffer query = new StringBuffer();
+
+ query.append("SELECT refobjs.name, localtables.id, remotetables.name, remotetables.id");
+ for (int idx = 1; idx <= 16; idx++) {
+ query.append(", refs.fokey");
+ query.append(idx);
+ query.append(", refs.refkey");
+ query.append(idx);
+ }
+ query.append(" FROM sysreferences refs, sysobjects refobjs, sysobjects localtables, sysobjects remotetables");
+ query.append(" WHERE refobjs.type = 'RI' AND refs.constrid = refobjs.id AND");
+ query.append(" localtables.type = 'U' AND refs.tableid = localtables.id AND localtables.name = '");
+ query.append(tableName);
+ query.append("' AND remotetables.type = 'U' AND refs.reftabid = remotetables.id");
+
+ Statement stmt = connection.createStatement();
+ PreparedStatement prepStmt = connection
+ .prepareStatement("SELECT name FROM syscolumns WHERE id = ? AND colid = ?");
+ ArrayList result = new ArrayList();
+
+ try {
+ ResultSet fkRs = stmt.executeQuery(query.toString());
+
+ while (fkRs.next()) {
+ ForeignKey fk = new ForeignKey(fkRs.getString(1));
+ int localTableId = fkRs.getInt(2);
+ int remoteTableId = fkRs.getInt(4);
+
+ fk.setForeignTableName(fkRs.getString(3));
+ for (int idx = 0; idx < 16; idx++) {
+ short fkColIdx = fkRs.getShort(5 + idx + idx);
+ short pkColIdx = fkRs.getShort(6 + idx + idx);
+ Reference ref = new Reference();
+
+ if (fkColIdx == 0) {
+ break;
+ }
+
+ prepStmt.setInt(1, localTableId);
+ prepStmt.setShort(2, fkColIdx);
+
+ ResultSet colRs = prepStmt.executeQuery();
+
+ if (colRs.next()) {
+ ref.setLocalColumnName(colRs.getString(1));
+ }
+ colRs.close();
+
+ prepStmt.setInt(1, remoteTableId);
+ prepStmt.setShort(2, pkColIdx);
+
+ colRs = prepStmt.executeQuery();
+
+ if (colRs.next()) {
+ ref.setForeignColumnName(colRs.getString(1));
+ }
+ colRs.close();
+
+ fk.addReference(ref);
+ }
+ result.add(fk);
+ }
+
+ fkRs.close();
+ } finally {
+ stmt.close();
+ prepStmt.close();
+ }
+
+ return result;
+ }
+
+ @Override
+ protected boolean isInternalPrimaryKeyIndex(Connection connection,
+ DatabaseMetaDataWrapper metaData, Table table, IIndex index) throws SQLException {
+ // We can simply check the sysindexes table where a specific flag is set
+ // for pk indexes
+ StringBuffer query = new StringBuffer();
+
+ query.append("SELECT name = sysindexes.name FROM sysindexes, sysobjects WHERE sysobjects.name = '");
+ query.append(table.getName());
+ query.append("' AND sysindexes.name = '");
+ query.append(index.getName());
+ query.append("' AND sysobjects.id = sysindexes.id AND (sysindexes.status & 2048) > 0");
+
+ Statement stmt = connection.createStatement();
+
+ try {
+ ResultSet rs = stmt.executeQuery(query.toString());
+ boolean result = rs.next();
+
+ rs.close();
+ return result;
+ } finally {
+ stmt.close();
+ }
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybasePlatform.java b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybasePlatform.java
new file mode 100644
index 0000000000..61463ea647
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/SybasePlatform.java
@@ -0,0 +1,122 @@
+package org.jumpmind.db.platform.sybase;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.Types;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.lang.StringUtils;
+import org.jumpmind.db.platform.AbstractJdbcDatabasePlatform;
+import org.jumpmind.db.platform.DatabasePlatformSettings;
+import org.jumpmind.log.Log;
+
+/*
+ * The platform implementation for Sybase.
+ */
+public class SybasePlatform extends AbstractJdbcDatabasePlatform {
+
+ /* Database name of this platform. */
+ public static final String DATABASENAME = "Sybase";
+
+ /* The standard Sybase jdbc driver. */
+ public static final String JDBC_DRIVER = "com.sybase.jdbc2.jdbc.SybDriver";
+
+ /* The old Sybase jdbc driver. */
+ public static final String JDBC_DRIVER_OLD = "com.sybase.jdbc.SybDriver";
+
+ /* The subprotocol used by the standard Sybase driver. */
+ public static final String JDBC_SUBPROTOCOL = "sybase:Tds";
+
+ /* The maximum size that text and binary columns can have. */
+ public static final long MAX_TEXT_SIZE = 2147483647;
+
+ public SybasePlatform(DataSource dataSource, DatabasePlatformSettings settings, Log log) {
+ super(dataSource, settings, log);
+
+ info.setMaxIdentifierLength(128);
+ info.setNullAsDefaultValueRequired(true);
+ info.setCommentPrefix("/*");
+ info.setCommentSuffix("*/");
+ info.setDelimiterToken("\"");
+ setDelimitedIdentifierModeOn(true);
+
+ info.addNativeTypeMapping(Types.ARRAY, "IMAGE");
+ // BIGINT is mapped back in the model reader
+ info.addNativeTypeMapping(Types.BIGINT, "DECIMAL(19,0)");
+ // we're not using the native BIT type because it is rather limited
+ // (cannot be NULL, cannot be indexed)
+ info.addNativeTypeMapping(Types.BIT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping(Types.BLOB, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.CLOB, "TEXT", Types.LONGVARCHAR);
+ info.addNativeTypeMapping(Types.DATE, "DATETIME", Types.TIMESTAMP);
+ info.addNativeTypeMapping(Types.DISTINCT, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.DOUBLE, "DOUBLE PRECISION");
+ info.addNativeTypeMapping(Types.FLOAT, "DOUBLE PRECISION", Types.DOUBLE);
+ info.addNativeTypeMapping(Types.INTEGER, "INT");
+ info.addNativeTypeMapping(Types.JAVA_OBJECT, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.LONGVARBINARY, "IMAGE");
+ info.addNativeTypeMapping(Types.LONGVARCHAR, "TEXT");
+ info.addNativeTypeMapping(Types.NULL, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.OTHER, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.REF, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.STRUCT, "IMAGE", Types.LONGVARBINARY);
+ info.addNativeTypeMapping(Types.TIME, "DATETIME", Types.TIMESTAMP);
+ info.addNativeTypeMapping(Types.TIMESTAMP, "DATETIME", Types.TIMESTAMP);
+ info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
+ info.addNativeTypeMapping("BOOLEAN", "SMALLINT", "SMALLINT");
+ info.addNativeTypeMapping("DATALINK", "IMAGE", "LONGVARBINARY");
+
+ info.setDefaultSize(Types.BINARY, 254);
+ info.setDefaultSize(Types.VARBINARY, 254);
+ info.setDefaultSize(Types.CHAR, 254);
+ info.setDefaultSize(Types.VARCHAR, 254);
+
+ info.setDateOverridesToTimestamp(true);
+ info.setNonBlankCharColumnSpacePadded(true);
+ info.setBlankCharColumnSpacePadded(true);
+ info.setCharColumnSpaceTrimmed(false);
+ info.setEmptyStringNulled(false);
+ info.setAutoIncrementUpdateAllowed(false);
+
+ primaryKeyViolationCodes = new int[] {423,511,515,530,547,2601,2615,2714};
+ ddlReader = new SybaseDdlReader(log, this);
+ ddlBuilder = new SybaseBuilder(log, this);
+ }
+
+ public String getName() {
+ return DATABASENAME;
+ }
+
+ public String getDefaultCatalog() {
+ if (StringUtils.isBlank(defaultCatalog)) {
+ defaultCatalog = getSqlTemplate().queryForObject("select DB_NAME()", String.class);
+ }
+ return defaultCatalog;
+ }
+
+ public String getDefaultSchema() {
+ if (StringUtils.isBlank(defaultSchema)) {
+ defaultSchema = (String) getSqlTemplate().queryForObject("select USER_NAME()",
+ String.class);
+ }
+ return defaultSchema;
+ }
+}
diff --git a/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/package.html b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/package.html
new file mode 100644
index 0000000000..65f05607fb
--- /dev/null
+++ b/symmetric/symmetric-jdbc/src/main/java/org/jumpmind/db/platform/sybase/package.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+ This package contains the platform implementation for the
+ Sybase ASE database.
+