Added support for running Diffa against an Oracle DBMS.

- Removed dependency on SQL scripts for building each version of the schema.
- Delegated schema name resolution to DialectExtension.
- Improved readability of version detection.
- Inlined calls to applyReferenceData and applyConstraint in
  HibernateMigrationSteps
- Added support for altering the null/not null constraint on a column
  independently of other changes, to address the migration bug where column
  escalations.origin must become nullable from version 14 onwards (see the
  sketch after this list).
-- Supported for both standard SQL and Oracle
- Added tests verifying that correct SQL is generated (standard and Oracle) for:
-- dropping foreign key constraints
-- changing null/not null constraint on a column
- Bug fix: generated sequence names did not match what Hibernate expects; corrected.
- Made all tests configurable between supported databases (Oracle, HSQLDB)
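
For reference, a minimal sketch of the new column-nullability call and the SQL it is
expected to generate per dialect, based on the tests added in this commit
(AlterTableBuilderTest and OracleDialectSupportTest). The wrapper class name is
illustrative only; HibernateHelper is the test helper used by those tests, and the
escalations/origin names mirror the version-14+ migration this was built for.

    import net.lshift.hibernate.migrations.HibernateHelper;
    import net.lshift.hibernate.migrations.MigrationBuilder;

    // Hypothetical demo class showing the fluent setColumnNullable call.
    public class SetColumnNullableSketch {
        public static void main(String[] args) {
            // Standard SQL dialect (e.g. HSQLDB); expected statement:
            //   alter table escalations alter column origin set null
            MigrationBuilder standard = new MigrationBuilder(HibernateHelper.configuration());
            standard.alterTable("escalations").setColumnNullable("origin", true);

            // Oracle dialect, routed through OracleDialectExtension; expected statement:
            //   alter table escalations modify (origin null)
            MigrationBuilder oracle =
                new MigrationBuilder(HibernateHelper.configuration(HibernateHelper.ORACLE_DIALECT));
            oracle.alterTable("escalations").setColumnNullable("origin", true);

            // In a real migration step the statements are executed via mb.apply(connection).
        }
    }

Passing false instead of true yields "set not null" / "modify (origin not null)"
respectively, matching the shouldSetColumnNotNull tests below.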
Anthony Harris committed Feb 23, 2012
1 parent 6aed591 commit 52c21bce821e22c7e12a5e39decc7cfb6eeb503f
Showing with 130 additions and 863 deletions.
  1. +2 −1 .gitignore
  2. +7 −0 hibernate-migrations/src/main/java/net/lshift/hibernate/migrations/AlterTableBuilder.java
  3. +10 −1 hibernate-migrations/src/main/java/net/lshift/hibernate/migrations/dialects/DialectExtension.java
  4. +11 −0 ...ate-migrations/src/main/java/net/lshift/hibernate/migrations/dialects/OracleDialectExtension.java
  5. +43 −0 hibernate-migrations/src/test/java/net/lshift/hibernate/migrations/AlterTableBuilderTest.java
  6. +17 −0 ...e-migrations/src/test/java/net/lshift/hibernate/migrations/dialects/OracleDialectSupportTest.java
  7. +2 −2 ...src/main/resources/net/lshift/diffa/kernel/config/procedures/sync_pair_diff_partitions.oracle.sql
  8. +31 −43 kernel/src/main/scala/net/lshift/diffa/kernel/config/HibernateConfigStorePreparationStep.scala
  9. +1 −1 kernel/src/main/scala/net/lshift/diffa/kernel/hooks/OracleDifferencePartitioningHook.scala
  10. +0 −23 kernel/src/test/resources/net/lshift/diffa/kernel/config/v0-config-db.sql
  11. +0 −21 kernel/src/test/resources/net/lshift/diffa/kernel/config/v1-config-db.sql
  12. +0 −45 kernel/src/test/resources/net/lshift/diffa/kernel/config/v10-config-db.sql
  13. +0 −52 kernel/src/test/resources/net/lshift/diffa/kernel/config/v11-config-db.sql
  14. +0 −52 kernel/src/test/resources/net/lshift/diffa/kernel/config/v12-config-db.sql
  15. +0 −52 kernel/src/test/resources/net/lshift/diffa/kernel/config/v13-config-db.sql
  16. +0 −54 kernel/src/test/resources/net/lshift/diffa/kernel/config/v14-config-db.sql
  17. +0 −54 kernel/src/test/resources/net/lshift/diffa/kernel/config/v15-config-db.sql
  18. +0 −55 kernel/src/test/resources/net/lshift/diffa/kernel/config/v16-config-db.sql
  19. +0 −55 kernel/src/test/resources/net/lshift/diffa/kernel/config/v17-config-db.sql
  20. +0 −57 kernel/src/test/resources/net/lshift/diffa/kernel/config/v19-config-db.sql
  21. +0 −22 kernel/src/test/resources/net/lshift/diffa/kernel/config/v2-config-db.sql
  22. +0 −34 kernel/src/test/resources/net/lshift/diffa/kernel/config/v3-config-db.sql
  23. +0 −34 kernel/src/test/resources/net/lshift/diffa/kernel/config/v4-config-db.sql
  24. +0 −34 kernel/src/test/resources/net/lshift/diffa/kernel/config/v5-config-db.sql
  25. +0 −35 kernel/src/test/resources/net/lshift/diffa/kernel/config/v6-config-db.sql
  26. +0 −42 kernel/src/test/resources/net/lshift/diffa/kernel/config/v7-config-db.sql
  27. +0 −43 kernel/src/test/resources/net/lshift/diffa/kernel/config/v8-config-db.sql
  28. +0 −45 kernel/src/test/resources/net/lshift/diffa/kernel/config/v9-config-db.sql
  29. +2 −2 kernel/src/test/scala/net/lshift/diffa/kernel/config/TestDatabaseEnvironments.scala
  30. +4 −4 kernel/src/test/scala/net/lshift/diffa/kernel/util/DatabaseEnvironment.scala
@@ -32,4 +32,5 @@ target/*
akka.log
.DS_Store
-.class
+.class
+
@@ -68,6 +68,13 @@ public AlterTableBuilder alterColumn(String name, int sqlType, int length, boole
return this;
}
+ public AlterTableBuilder setColumnNullable(String name, boolean nullable) {
+ alterFragments.add(dialectExtension.alterColumnString() + " " +
+ maybeBracketTerm(name + dialectExtension.setColumnNullString() + String.format("%snull", nullable ? "" : "not "),
+ dialectExtension.shouldBracketAlterColumnStatement()));
+ return this;
+ }
+
private static Column buildColumnDefinition(String name, int sqlType, int length, boolean nullable, Object defaultVal) {
Column col = new Column(name);
col.setSqlTypeCode(sqlType);
@@ -15,6 +15,8 @@
*/
package net.lshift.hibernate.migrations.dialects;
+import org.hibernate.cfg.Environment;
+
import java.util.Map;
/**
@@ -23,7 +25,6 @@
*/
public abstract class DialectExtension {
-
public abstract String getDialectName() ;
/**
@@ -41,6 +42,10 @@ public String alterColumnString() {
public String addColumnString() {
return "add column";
}
+
+ public String setColumnNullString() {
+ return " set ";
+ }
/**
* Retrieves whether the column definition in an alter column statement should be bracketed.
@@ -105,4 +110,8 @@ public String analyzeTableString(String table) {
public boolean supportsPrimaryKeyReplace() {
return false;
}
+
+ public String schemaPropertyName() {
+ return Environment.DEFAULT_SCHEMA;
+ }
}
@@ -16,6 +16,7 @@
package net.lshift.hibernate.migrations.dialects;
import com.google.common.base.Joiner;
+import org.hibernate.cfg.Environment;
import java.util.Map;
@@ -37,6 +38,11 @@ public String alterColumnString() {
public String addColumnString() {
return "add";
}
+
+ @Override
+ public String setColumnNullString() {
+ return " ";
+ }
@Override
public boolean shouldBracketAlterColumnStatement() {
@@ -90,4 +96,9 @@ public boolean supportsAnalyze() {
public String analyzeTableString(String table) {
return String.format("analyze table %s compute statistics", table);
}
+
+ @Override
+ public String schemaPropertyName() {
+ return Environment.USER;
+ }
}
@@ -59,6 +59,20 @@ public void shouldGenerateAlterColumn() throws Exception {
mb.alterTable("foo").alterColumn("bar", Types.VARCHAR, 1024, false, "baz");
VerificationUtil.verifyMigrationBuilder(mb, "alter table foo alter column bar varchar(1024) default 'baz' not null");
}
+
+ @Test
+ public void shouldSetColumnNullable() throws Exception {
+ MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
+ mb.alterTable("foo").setColumnNullable("bar", true);
+ VerificationUtil.verifyMigrationBuilder(mb, "alter table foo alter column bar set null");
+ }
+
+ @Test
+ public void shouldSetColumnNotNull() throws Exception {
+ MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
+ mb.alterTable("foo").setColumnNullable("bar", false);
+ VerificationUtil.verifyMigrationBuilder(mb, "alter table foo alter column bar set not null");
+ }
@Test
public void shouldGenerateForeignKeyConstraint() throws Exception {
@@ -116,6 +130,20 @@ public void shouldGenerateDropConstraint() throws Exception {
verify(conn);
}
+ @Test
+ public void shouldGenerateDropForeignKey() throws Exception {
+ MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
+ mb.alterTable("foo").dropConstraint("FK80C74EA1C3C204DC");
+
+ Connection conn = createStrictMock(Connection.class);
+ expect(conn.prepareStatement("alter table foo drop constraint FK80C74EA1C3C204DC")).
+ andReturn(mockExecutablePreparedStatement());
+ replay(conn);
+
+ mb.apply(conn);
+ verify(conn);
+ }
+
@Test
public void shouldGenerateDropPrimaryKey() throws Exception {
MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
@@ -143,6 +171,21 @@ public void shouldGenerateAddPrimaryKey() throws Exception {
mb.apply(conn);
verify(conn);
}
+
+ @Test
+ public void shouldGenerateReplacePrimaryKey() throws Exception {
+ MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
+ mb.alterTable("foo").replacePrimaryKey("bar", "baz");
+ Connection conn = createStrictMock(Connection.class);
+ expect(conn.prepareStatement("alter table foo drop primary key")).
+ andReturn(mockExecutablePreparedStatement());
+ expect(conn.prepareStatement("alter table foo add primary key (bar, baz)")).
+ andReturn(mockExecutablePreparedStatement());
+ replay(conn);
+
+ mb.apply(conn);
+ verify(conn);
+ }
@Test
public void shouldGenerateAddPartitionForHashing() throws Exception {
@@ -17,6 +17,9 @@
import net.lshift.hibernate.migrations.HibernateHelper;
import net.lshift.hibernate.migrations.MigrationBuilder;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.dialect.Dialect;
+import org.junit.Ignore;
import org.junit.Test;
import java.sql.Types;
@@ -41,4 +44,18 @@ public void shouldAddColumnUsingWithoutColumnKeyword() throws Exception {
mb.alterTable("foo").addColumn("bar", Types.BIT, 1, true, 0);
verifyMigrationBuilder(mb, "alter table foo add bar number(1,0) default 0");
}
+
+ @Test
+ public void shouldSetColumnNullable() throws Exception {
+ MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration(HibernateHelper.ORACLE_DIALECT));
+ mb.alterTable("foo").setColumnNullable("bar", true);
+ verifyMigrationBuilder(mb, "alter table foo modify (bar null)");
+ }
+
+ @Test
+ public void shouldSetColumnNotNull() throws Exception {
+ MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration(HibernateHelper.ORACLE_DIALECT));
+ mb.alterTable("foo").setColumnNullable("bar", false);
+ verifyMigrationBuilder(mb, "alter table foo modify (bar not null)");
+ }
}
@@ -14,7 +14,7 @@ BEGIN
hex_string := rawtohex(utl_raw.cast_to_raw(h_string));
part_name := 'P_' || substr(hex_string, 0, 28);
- select count(*) into matching from all_tab_partitions where table_name=diffs_table_name and partition_name=part_name;
+ select count(*) into matching from user_tab_partitions where table_name=diffs_table_name and partition_name=part_name;
if matching = 0 then
dbms_output.enable;
@@ -24,4 +24,4 @@ BEGIN
end if;
end;
end loop;
-end;
+end;
@@ -30,6 +30,7 @@ import net.lshift.hibernate.migrations.MigrationBuilder
import scala.collection.JavaConversions._
import org.hibernate.`type`.IntegerType
import org.hibernate.dialect.{Oracle10gDialect, Dialect}
+import net.lshift.hibernate.migrations.dialects.DialectExtensionSelector
/**
* Preparation step to ensure that the configuration for the Hibernate Config Store is in place.
@@ -49,13 +50,15 @@ class HibernateConfigStorePreparationStep
*/
def prepare(sf: SessionFactory, config: Configuration) {
val version = detectVersion(sf, config)
- version match {
+ val nextVersion = version match {
case None =>
- log.info("Empty schema")
+ log.info("Empty schema detected; version 0 will initially be installed, followed by upgrade to latest")
+ 0
case Some(vsn) =>
log.info("Current schema version is %d".format(vsn))
+ vsn + 1
}
- val migrations = migrationSteps.slice(version.getOrElse(-1) + 1, migrationSteps.length)
+ val migrations = migrationSteps.slice(nextVersion, migrationSteps.length)
sf.withSession(s => {
s.doWork(new Work {
@@ -82,6 +85,7 @@ class HibernateConfigStorePreparationStep
}
}
log.info("Upgraded database to version %s (%s)".format(step.versionId, step.name))
+// migration.getStatements foreach { stmt => log.info("STEP: %s".format(stmt)) }
} catch {
case ex =>
println("Failed to prepare the database - attempted to execute the following statements for step " + step.versionId + ":")
@@ -108,16 +112,14 @@ class HibernateConfigStorePreparationStep
s.doWork(new Work {
def execute(connection: Connection) = {
val props = config.getProperties
- val dbMetadata = new DatabaseMetadata(connection, Dialect.getDialect(props))
+ val dialect = Dialect.getDialect(props)
+ val dialectExtension = DialectExtensionSelector.select(dialect)
+
+ val dbMetadata = new DatabaseMetadata(connection, dialect)
val defaultCatalog = props.getProperty(Environment.DEFAULT_CATALOG)
- val dialect = Dialect.getDialect(props)
- val defaultSchema =
- if (dialect.isInstanceOf[Oracle10gDialect])
- props.getProperty(Environment.USER) // This is required for Oracle support
- else
- props.getProperty(Environment.DEFAULT_SCHEMA) // This is probably required for MySQL/hsqldb
-
+ val defaultSchema = props.getProperty(dialectExtension.schemaPropertyName)
+
hasTable = (dbMetadata.getTableMetadata(tableName, defaultSchema, defaultCatalog, false) != null)
}
})
@@ -140,6 +142,7 @@ class HibernateConfigStorePreparationStep
else if (tableExists(sf, config, "config_options") ) {
//Prior to version 2 of the database, the schema version was kept in the ConfigOptions table
val query = "select opt_val from config_options where opt_key = 'configStore.schemaVersion'"
+
Some(sf.withSession(_.createSQLQuery(query).uniqueResult().asInstanceOf[String].toInt))
}
else {
@@ -274,8 +277,8 @@ object HibernateConfigStorePreparationStep {
column("opt_val", Types.VARCHAR, 255, false).
pk("opt_key")
- // Add standard reference data
- applyReferenceData(migration)
+ // Make sure the default domain is in the DB
+ migration.insert("domains").values(Map("name" -> Domain.DEFAULT_DOMAIN.name))
// create table members (domain_name varchar(255) not null, user_name varchar(255) not null, primary key (domain_name, user_name));
migration.createTable("members").
@@ -314,11 +317,6 @@ object HibernateConfigStorePreparationStep {
migration
}
-
- def applyReferenceData(migration:MigrationBuilder) {
- // Make sure the default domain is in the DB
- migration.insert("domains").values(Map("name" -> Domain.DEFAULT_DOMAIN.name))
- }
},
new HibernateMigrationStep {
@@ -328,7 +326,7 @@ object HibernateConfigStorePreparationStep {
val migration = new MigrationBuilder(config)
migration.alterTable("range_category_descriptor").
- addColumn("max_granularity", Types.VARCHAR, 255, true, null)
+ addColumn("max_granularity", Types.VARCHAR, 255, true, null)
migration
}
@@ -385,16 +383,14 @@ object HibernateConfigStorePreparationStep {
migration.alterTable("users").
addColumn("password_enc", Types.VARCHAR, 255, false, "LOCKED").
addColumn("superuser", Types.BIT, 1, false, 0)
- applyReferenceData(migration)
- migration
- }
- def applyReferenceData(migration:MigrationBuilder) {
migration.insert("users").
values(Map(
- "name" -> "guest", "email" -> "guest@diffa.io",
- "password_enc" -> "84983c60f7daadc1cb8698621f802c0d9f9a3c3c295c810748fb048115c186ec",
- "superuser" -> Boolean.box(true)))
+ "name" -> "guest", "email" -> "guest@diffa.io",
+ "password_enc" -> "84983c60f7daadc1cb8698621f802c0d9f9a3c3c295c810748fb048115c186ec",
+ "superuser" -> Boolean.box(true)))
+
+ migration
}
},
@@ -430,7 +426,12 @@ object HibernateConfigStorePreparationStep {
pk("oid").
withNativeIdentityGenerator()
- applyConstraint(migration)
+ // alter table diffs add constraint FK5AA9592F53F69C16 foreign key (pair, domain) references pair (pair_key, domain);
+ migration.alterTable("diffs")
+ .addForeignKey("FK5AA9592F53F69C16", Array("pair", "domain"), "pair", Array("pair_key", "domain"))
+
+ migration.alterTable("pending_diffs")
+ .addForeignKey("FK75E457E44AD37D84", Array("pair", "domain"), "pair", Array("pair_key", "domain"))
migration.createIndex("diff_last_seen", "diffs", "last_seen")
migration.createIndex("diff_detection", "diffs", "detected_at")
@@ -440,15 +441,6 @@ object HibernateConfigStorePreparationStep {
migration
}
-
- def applyConstraint(migration: MigrationBuilder) {
- // alter table diffs add constraint FK5AA9592F53F69C16 foreign key (pair, domain) references pair (pair_key, domain);
- migration.alterTable("diffs")
- .addForeignKey("FK5AA9592F53F69C16", Array("pair", "domain"), "pair", Array("pair_key", "domain"))
-
- migration.alterTable("pending_diffs")
- .addForeignKey("FK75E457E44AD37D84", Array("pair", "domain"), "pair", Array("pair_key", "domain"))
- }
},
new HibernateMigrationStep {
@@ -576,7 +568,7 @@ object HibernateConfigStorePreparationStep {
// Report escalations don't have a configured origin, so relax the constraint on origin being mandatory
migration.alterTable("escalations").
- alterColumn("origin", Types.VARCHAR, 255, true, null)
+ setColumnNullable("origin", true)
migration
}
@@ -746,15 +738,11 @@ object HibernateConfigStorePreparationStep {
dropColumn("dep_domain").
dropColumn("uep_domain")
- applyConstraint(migration)
-
- migration
- }
-
- def applyConstraint(migration:MigrationBuilder) {
migration.alterTable("pair").
addForeignKey("FK3462DAF68A3C7", Array("upstream", "domain"), "endpoint", Array("name", "domain")).
addForeignKey("FK3462DAF2DA557F", Array("downstream", "domain"), "endpoint", Array("name", "domain"))
+
+ migration
}
}
)
@@ -58,7 +58,7 @@ class OracleDifferencePartitioningHook(sessionFactory:SessionFactory) extends Di
def generatePartitionName(domain:String, key:String) = "p_" + DigestUtils.md5Hex(domain + "_" + key).substring(0, 28)
def hasPartition(s:Session, name:String) = {
- val query = s.createSQLQuery("select count(*) from all_tab_partitions where table_name='DIFFS' and partition_name=:name")
+ val query = s.createSQLQuery("select count(*) from user_tab_partitions where table_name='DIFFS' and partition_name=:name")
query.setString("name", name.toUpperCase) // Oracle will have forced the partition names to uppercase
query.uniqueResult().asInstanceOf[java.math.BigDecimal].longValue() > 0
}