0002152: DDL errors on DB2 i5 AS/400
chenson42 committed Jan 28, 2015
1 parent 126b196 commit 7a7b0e0
Showing 13 changed files with 169 additions and 77 deletions.
@@ -270,7 +270,8 @@ protected void init() {
this.parameterService = new ParameterService(platform, propertiesFactory,
properties.get(ParameterConstants.RUNTIME_CONFIG_TABLE_PREFIX, "sym"));

boolean parameterTableExists = this.platform.getTableFromCache(TableConstants.getTableName(properties.get(ParameterConstants.RUNTIME_CONFIG_TABLE_PREFIX), TableConstants.SYM_PARAMETER), false) != null;
boolean parameterTableExists = this.platform.readTableFromDatabase(null, null,
TableConstants.getTableName(properties.get(ParameterConstants.RUNTIME_CONFIG_TABLE_PREFIX), TableConstants.SYM_PARAMETER), true) != null;
if (parameterTableExists) {
this.parameterService.setDatabaseHasBeenInitialized(true);
this.parameterService.rereadParameters();
@@ -716,21 +717,23 @@ public synchronized void stop() {
nodeCommunicationService.stop();
}

List<ProcessInfo> infos = getStatisticManager().getProcessInfos();
for (ProcessInfo processInfo : infos) {
Thread thread = processInfo.getThread();
if (processInfo.getStatus() != Status.OK && thread.isAlive()) {
log.info("Trying to interrupt thread '{}' ", thread.getName());
try {
thread.interrupt();
} catch (Exception e) {
log.info("Caught exception while attempting to interrupt thread", e);
if (statisticManager != null) {
List<ProcessInfo> infos = statisticManager.getProcessInfos();
for (ProcessInfo processInfo : infos) {
Thread thread = processInfo.getThread();
if (processInfo.getStatus() != Status.OK && thread.isAlive()) {
log.info("Trying to interrupt thread '{}' ", thread.getName());
try {
thread.interrupt();
} catch (Exception e) {
log.info("Caught exception while attempting to interrupt thread", e);
}
}
}

Thread.interrupted();
}

Thread.interrupted();

started = false;
starting = false;
}
@@ -808,7 +811,7 @@ public boolean isConfigured() {
false) || StringUtils.isNotBlank(getParameterService().getString(
ParameterConstants.AUTO_CONFIGURE_REG_SVR_SQL_SCRIPT)));

Table symNodeTable = symmetricDialect.getPlatform().getTableFromCache(
Table symNodeTable = symmetricDialect.getPlatform().readTableFromDatabase(null, null,
TableConstants.getTableName(parameterService.getTablePrefix(),
TableConstants.SYM_NODE), true);

@@ -19,6 +19,8 @@
* under the License.
*/

import static org.apache.commons.lang.StringUtils.isNotBlank;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
@@ -218,8 +220,8 @@ public Database readDatabase(String catalog, String schema, String[] tableTypes)
public Database readFromDatabase(Table... tables) {
Database fromDb = new Database();
for (Table tableFromXml : tables) {
Table tableFromDatabase = getTableFromCache(getDefaultCatalog(),
getDefaultSchema(), tableFromXml.getName(), true);
Table tableFromDatabase = getTableFromCache(tableFromXml.getCatalog(),
tableFromXml.getSchema(), tableFromXml.getName(), true);
if (tableFromDatabase != null) {
fromDb.addTable(tableFromDatabase);
}
@@ -235,29 +237,34 @@ public Table readTableFromDatabase(String catalogName, String schemaName, String
public Table readTableFromDatabase(String catalogName, String schemaName, String tableName, boolean useDefaultSchema) {
String originalFullyQualifiedName = Table.getFullyQualifiedTableName(catalogName,
schemaName, tableName);
catalogName = catalogName == null && useDefaultSchema ? getDefaultCatalog() : catalogName;
schemaName = schemaName == null && useDefaultSchema ? getDefaultSchema() : schemaName;
Table table = ddlReader.readTable(catalogName, schemaName, tableName);
String defaultedCatalogName = catalogName == null && useDefaultSchema ? getDefaultCatalog() : catalogName;
String defaultedSchemaName = schemaName == null && useDefaultSchema ? getDefaultSchema() : schemaName;

Table table = ddlReader.readTable(defaultedCatalogName, defaultedSchemaName, tableName);
if (table == null && metadataIgnoreCase) {

IDdlReader reader = getDdlReader();

List<String> catalogNames = reader.getCatalogNames();
if (catalogNames != null) {
for (String name : catalogNames) {
if (name != null && name.equalsIgnoreCase(catalogName)) {
catalogName = name;
break;
if (isNotBlank(catalogName)) {
List<String> catalogNames = reader.getCatalogNames();
if (catalogNames != null) {
for (String name : catalogNames) {
if (name != null && name.equalsIgnoreCase(catalogName)) {
catalogName = name;
break;
}
}
}
}
}

List<String> schemaNames = reader.getSchemaNames(catalogName);
if (schemaNames != null) {
for (String name : schemaNames) {
if (name != null && name.equalsIgnoreCase(schemaName)) {
schemaName = name;
break;

if (isNotBlank(schemaName)) {
List<String> schemaNames = reader.getSchemaNames(catalogName);
if (schemaNames != null) {
for (String name : schemaNames) {
if (name != null && name.equalsIgnoreCase(schemaName)) {
schemaName = name;
break;
}
}
}
}
@@ -308,9 +315,6 @@ public Table getTableFromCache(String catalogName, String schemaName, String tab
synchronized (this.getClass()) {
try {
Table table = readTableFromDatabase(catalogName, schemaName, tableName, true);
if (table == null) {
table = readTableFromDatabase(catalogName, schemaName, tableName, false);
}
tableCache.put(key, table);
retTable = table;
} catch (RuntimeException ex) {
@@ -592,6 +592,10 @@ protected void processChange(Database currentModel, Database desiredModel,
writeExternalIndexCreateStmt(change.getChangedTable(), change.getNewIndex(), ddl);
change.apply(currentModel, delimitedIdentifierModeOn);
}

protected void filterChanges(Collection<TableChange> changes) {

}

/**
* Processes the changes to the structure of tables.
@@ -605,6 +609,9 @@ protected void processChange(Database currentModel, Database desiredModel,
*/
protected void processTableStructureChanges(Database currentModel, Database desiredModel,
Collection<TableChange> changes, StringBuilder ddl) {

filterChanges(changes);

LinkedHashMap<String, List<TableChange>> changesPerTable = new LinkedHashMap<String, List<TableChange>>();
LinkedHashMap<String, List<ForeignKey>> unchangedFKs = new LinkedHashMap<String, List<ForeignKey>>();
boolean caseSensitive = delimitedIdentifierModeOn;
@@ -878,7 +885,7 @@ protected void processTableStructureChanges(Database currentModel, Database desi
if (change instanceof ColumnDataTypeChange) {
ColumnDataTypeChange typeChange = (ColumnDataTypeChange)change;
if (typeChange.getNewTypeCode() == Types.BIGINT) {
if (writeAlterColumnDataType(typeChange, ddl)) {
if (writeAlterColumnDataTypeToBigInt(typeChange, ddl)) {
it.remove();
}
}
Expand All @@ -898,8 +905,8 @@ protected void processChange(Database currentModel, Database desiredModel,
change.apply(currentModel, delimitedIdentifierModeOn);

}

protected boolean writeAlterColumnDataType(ColumnDataTypeChange change, StringBuilder ddl) {
protected boolean writeAlterColumnDataTypeToBigInt(ColumnDataTypeChange change, StringBuilder ddl) {
return false;
}

@@ -1,4 +1,5 @@
package org.jumpmind.db.platform.db2;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -18,12 +19,80 @@
* under the License.
*/

import java.sql.Types;
import java.util.Collection;
import java.util.Iterator;

import org.jumpmind.db.alter.ColumnDataTypeChange;
import org.jumpmind.db.alter.TableChange;
import org.jumpmind.db.model.Column;
import org.jumpmind.db.model.TypeMap;
import org.jumpmind.db.platform.DatabaseNamesConstants;

public class Db2As400DdlBuilder extends Db2DdlBuilder {

public Db2As400DdlBuilder() {
this.databaseName = DatabaseNamesConstants.DB2AS400;
}
public Db2As400DdlBuilder() {
this.databaseName = DatabaseNamesConstants.DB2AS400;
databaseInfo.setRequiresAutoCommitForDdl(true);
}

@Override
protected void filterChanges(Collection<TableChange> changes) {
super.filterChanges(changes);
Iterator<TableChange> i = changes.iterator();
while (i.hasNext()) {
TableChange tableChange = i.next();
if (tableChange instanceof ColumnDataTypeChange) {
ColumnDataTypeChange change = (ColumnDataTypeChange)tableChange;
if (change.getNewTypeCode() == Types.LONGVARCHAR &&
change.getChangedColumn().getJdbcTypeCode() == Types.VARCHAR) {
log.debug("Not processing the detect type change to LONGVARCHAR because "
+ "a create of a long varchar results in a variable length VARCHAR field");
i.remove();
}
}
}
}

@Override
protected void writeCastExpression(Column sourceColumn, Column targetColumn, StringBuilder ddl) {
String sourceNativeType = getBareNativeType(sourceColumn);
String targetNativeType = getBareNativeType(targetColumn);

if (sourceNativeType.equals(targetNativeType)) {
printIdentifier(getColumnName(sourceColumn), ddl);
} else {
String type = getSqlType(targetColumn);
if ("LONG VARCHAR".equals(type)) {
type = "VARCHAR";
}

if ("VARCHAR".equals(type)) {
type = type + "(" + sourceColumn.getSizeAsInt() + ")";
}

/*
* DB2 has the limitation that it cannot convert numeric values to
* VARCHAR, though it can convert them to CHAR
*/
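/*
 * Hypothetical example (column name and size invented, not part of this
 * change): a numeric source column copied into a VARCHAR(10) target is
 * therefore written roughly as CAST(QTY AS CHAR(10)) rather than
 * CAST(QTY AS VARCHAR(10)).
 */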
if (TypeMap.isNumericType(sourceColumn.getMappedTypeCode())
&& "VARCHAR".equalsIgnoreCase(targetNativeType)) {
Object sizeSpec = targetColumn.getSize();

if (sizeSpec == null) {
sizeSpec = databaseInfo.getDefaultSize(targetColumn.getMappedTypeCode());
}
type = "CHAR(" + sizeSpec.toString() + ")";

}

ddl.append("CAST(");
printIdentifier(getColumnName(sourceColumn), ddl);
ddl.append(" AS ");
ddl.append(type);
ddl.append(")");

}
}

}
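For context, a hypothetical illustration of the LONG VARCHAR branch of writeCastExpression above. The table and column names, the size, and the INSERT ... SELECT shape of the copy statement are assumptions for illustration, not output captured from this commit: when the target column's SQL type is LONG VARCHAR, the cast is rewritten as a plain VARCHAR sized from the source column, so the data-copy statement around a rebuilt table would come out roughly as

-- assumed shape of the copy statement; names and size are invented
INSERT INTO ORDER_LINE_ (ID, NOTES)
  SELECT ID, CAST(NOTES AS VARCHAR(2000)) FROM ORDER_LINE

while columns whose source and target native types already match are copied by name with no CAST at all.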
@@ -211,7 +211,7 @@ protected void processTableStructureChanges(Database currentModel, Database desi
}

@Override
protected boolean writeAlterColumnDataType(ColumnDataTypeChange change, StringBuilder ddl) {
protected boolean writeAlterColumnDataTypeToBigInt(ColumnDataTypeChange change, StringBuilder ddl) {
if (!change.getChangedColumn().isPrimaryKey()) {
writeTableAlterStmt(change.getChangedTable(), ddl);
ddl.append(" ALTER COLUMN ");
@@ -315,7 +315,7 @@ protected void processTableStructureChanges(Database currentModel, Database desi
}

@Override
protected boolean writeAlterColumnDataType(ColumnDataTypeChange change, StringBuilder ddl) {
protected boolean writeAlterColumnDataTypeToBigInt(ColumnDataTypeChange change, StringBuilder ddl) {
Table table = change.getChangedTable();
Column column = change.getChangedColumn();
if (column.isPrimaryKey()) {
@@ -195,7 +195,7 @@ protected void writeColumnAutoIncrementStmt(Table table, Column column, StringBu
}

@Override
protected boolean writeAlterColumnDataType(ColumnDataTypeChange change, StringBuilder ddl) {
protected boolean writeAlterColumnDataTypeToBigInt(ColumnDataTypeChange change, StringBuilder ddl) {
change.getChangedColumn().setTypeCode(change.getNewTypeCode());
writeAlterColumn(change.getChangedTable(), change.getChangedColumn(), ddl);
return true;
@@ -102,11 +102,6 @@ protected boolean shouldGeneratePrimaryKeys(Column[] primaryKeyColumns) {
return true;
}
}

@Override
protected boolean writeAlterColumnDataType(ColumnDataTypeChange change, StringBuilder ddl) {
return false;
}

@Override
protected void processTableStructureChanges(Database currentModel, Database desiredModel,
@@ -136,7 +131,7 @@ protected void processTableStructureChanges(Database currentModel, Database desi
&& dataTypeChange.getNewTypeCode() == Types.LONGVARCHAR) {
changeIt.remove();
} else if (dataTypeChange.getNewTypeCode() == Types.BIGINT) {
if (writeAlterColumnDataType(dataTypeChange, ddl)) {
if (writeAlterColumnDataTypeToBigInt(dataTypeChange, ddl)) {
changeIt.remove();
}
}
@@ -470,7 +470,7 @@ protected void processChange(Database currentModel, Database desiredModel,
}

@Override
protected boolean writeAlterColumnDataType(ColumnDataTypeChange change, StringBuilder ddl) {
protected boolean writeAlterColumnDataTypeToBigInt(ColumnDataTypeChange change, StringBuilder ddl) {
writeTableAlterStmt(change.getChangedTable(), ddl);
ddl.append("MODIFY (");
Column column = change.getChangedColumn();
@@ -125,7 +125,7 @@ protected void dropTable(Table table, StringBuilder ddl, boolean temporary, bool
}

@Override
protected boolean writeAlterColumnDataType(ColumnDataTypeChange change, StringBuilder ddl) {
protected boolean writeAlterColumnDataTypeToBigInt(ColumnDataTypeChange change, StringBuilder ddl) {
writeTableAlterStmt(change.getChangedTable(), ddl);
ddl.append(" ALTER COLUMN ");
Column column = change.getChangedColumn();
@@ -541,6 +541,7 @@ public int compare(Table obj1, Table obj2) {

public Table readTable(final String catalog, final String schema, final String table) {
try {
log.debug("reading table: " + table);
JdbcSqlTemplate sqlTemplate = (JdbcSqlTemplate) platform.getSqlTemplate();
return postprocessTableFromDatabase(sqlTemplate.execute(new IConnectionCallback<Table>() {
public Table execute(Connection connection) throws SQLException {
@@ -552,7 +553,9 @@ public Table execute(Connection connection) throws SQLException {

ResultSet tableData = null;
try {
log.debug("getting table metadata for " + table);
tableData = metaData.getTables(getTableNamePattern(table));
log.debug("done getting table metadata for " + table);
if (tableData != null && tableData.next()) {
Map<String, Object> values = readMetaData(tableData, initColumnsForTable());
return readTable(connection, metaData, values);
@@ -1365,6 +1368,7 @@ public List<String> execute(Connection connection) throws SQLException {
DatabaseMetaData meta = connection.getMetaData();
ResultSet rs = null;
try {

rs = meta.getSchemas();
while (rs.next()) {
int columnCount = rs.getMetaData().getColumnCount();