diff --git a/.gitignore b/.gitignore
index e231e612f..0c54f9fb7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,4 +7,5 @@ target
 .idea
 *.iml
 **/.DS_Store
-
+db2jcc4.jar
+ojdbc7.jar
diff --git a/.travis.yml b/.travis.yml
index 3f40195c1..d5ca1079b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,6 +11,7 @@ env:
   - DB=postgresql
   - DB=mysql
   - DB=sqlserver
+  - DB=db2
 addons:
   postgresql: "9.6"
 services:
@@ -18,6 +19,8 @@ services:
 before_script:
   - export SPRING_PROFILES_ACTIVE=nflow.db.$DB
   - ./travis/setup-db-$DB.sh
+script:
+  - mvn test -B -P $DB
 after_script:
   - for i in nflow-*/target/surefire-reports/*.txt; do echo ">>>>>>>>>>>>>>>>>>>"; echo $i; echo "<<<<<<<<<<<<<<<<<<<<<"; cat $i; done
 notifications:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b8668ed14..9f1e6be02 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,11 @@
-## 5.2.1 (yyyy-MM-dd)
+## 5.3.0 (yyyy-MM-dd)
 
 **Highlights**
+- Add experimental DB2 support
+
+**Breaking changes**
+- The nFlow datasource now uses database-specific `Driver` classes instead of `DataSource` classes.
+  Make a corresponding change if you have customized the `nflow.db.*.driver` parameters.
 
 **Details**
 - Upgraded Spring to version 5.1.3.RELEASE
diff --git a/README.md b/README.md
index 3bc37247d..f9cd0fcbd 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ nFlow is a battle-proven solution for orchestrating business processes. Dependin
 * High availability — the same workflows can be processed by multiple deployments
 * Fault tolerant — automatic recovery if runtime environment crashes
 * Atomic state updates — uses and requires a relational database for atomic state updates and locking
-* Multiple databases supported — PostgreSQL, MySQL, Oracle, Microsoft SQL Server, H2
+* Multiple databases supported — PostgreSQL, MySQL, Oracle, Microsoft SQL Server, DB2, H2
 * Open Source under EUPL
 
 # Getting Started
diff --git a/nflow-engine/src/main/java/io/nflow/engine/config/Profiles.java b/nflow-engine/src/main/java/io/nflow/engine/config/Profiles.java
index 160977926..898363b53 100644
--- a/nflow-engine/src/main/java/io/nflow/engine/config/Profiles.java
+++ b/nflow-engine/src/main/java/io/nflow/engine/config/Profiles.java
@@ -5,6 +5,11 @@
  */
 public abstract class Profiles {
 
+  /**
+   * Profile to enable DB2 database.
+   */
+  public static final String DB2 = "nflow.db.db2";
+
   /**
    * Profile to enable H2 database.
   */
diff --git a/nflow-engine/src/main/java/io/nflow/engine/config/db/DatabaseConfiguration.java b/nflow-engine/src/main/java/io/nflow/engine/config/db/DatabaseConfiguration.java
index d546b5744..eeaddd58b 100644
--- a/nflow-engine/src/main/java/io/nflow/engine/config/db/DatabaseConfiguration.java
+++ b/nflow-engine/src/main/java/io/nflow/engine/config/db/DatabaseConfiguration.java
@@ -58,24 +58,24 @@ public DataSource nflowDatasource(Environment env, BeanFactory appCtx) {
     logger.info("Database connection to {} using {}", dbType, url);
     HikariConfig config = new HikariConfig();
     config.setPoolName("nflow");
-    config.setDataSourceClassName(property(env, "driver"));
-    config.addDataSourceProperty("url", url);
+    config.setDriverClassName(property(env, "driver"));
+    config.setJdbcUrl(url);
     config.setUsername(property(env, "user"));
     config.setPassword(property(env, "password"));
     config.setMaximumPoolSize(property(env, "max_pool_size", Integer.class));
     config.setIdleTimeout(property(env, "idle_timeout_seconds", Long.class) * 1000);
     config.setAutoCommit(true);
     setMetricRegistryIfBeanFoundOnClassPath(config, appCtx);
-    return new HikariDataSource(config);
+    DataSource nflowDataSource = new HikariDataSource(config);
+    checkDatabaseConfiguration(env, nflowDataSource);
+    return nflowDataSource;
   }
 
   private void setMetricRegistryIfBeanFoundOnClassPath(HikariConfig config, BeanFactory appCtx) {
     try {
       Class<?> metricClass = Class.forName("com.codahale.metrics.MetricRegistry");
       Object metricRegistry = appCtx.getBean(metricClass);
-      if (metricRegistry != null) {
-        config.setMetricRegistry(metricRegistry);
-      }
+      config.setMetricRegistry(metricRegistry);
     } catch (@SuppressWarnings("unused") ClassNotFoundException | NoSuchBeanDefinitionException e) {
       // ignored - metrics is an optional dependency
     }
@@ -161,4 +161,12 @@ public DatabaseInitializer nflowDatabaseInitializer(@NFlow DataSource dataSource
     return new DatabaseInitializer(dbType, dataSource, env);
   }
 
+  /**
+   * Checks that the database is configured as nFlow expects.
+   * @param env The Spring environment.
+   * @param dataSource The nFlow datasource.
+ */ + protected void checkDatabaseConfiguration(Environment env, DataSource dataSource) { + // no common checks for all databases + } } diff --git a/nflow-engine/src/main/java/io/nflow/engine/config/db/Db2DatabaseConfiguration.java b/nflow-engine/src/main/java/io/nflow/engine/config/db/Db2DatabaseConfiguration.java new file mode 100644 index 000000000..730c599af --- /dev/null +++ b/nflow-engine/src/main/java/io/nflow/engine/config/db/Db2DatabaseConfiguration.java @@ -0,0 +1,201 @@ +package io.nflow.engine.config.db; + +import static io.nflow.engine.config.Profiles.DB2; +import static io.nflow.engine.internal.dao.DaoUtil.toTimestamp; +import static java.lang.System.currentTimeMillis; +import static java.util.concurrent.TimeUnit.HOURS; +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +import java.sql.*; +import java.time.ZoneId; +import java.util.Calendar; +import java.util.Objects; +import java.util.Optional; +import java.util.TimeZone; + +import javax.sql.DataSource; + +import org.joda.time.DateTime; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Profile; +import org.springframework.core.env.Environment; +import org.springframework.jdbc.core.JdbcTemplate; + +import io.nflow.engine.internal.storage.db.SQLVariants; +import io.nflow.engine.workflow.instance.WorkflowInstance.WorkflowInstanceStatus; + +/** + * Configuration for DB2 database. Note: tested only using DB2 Express-C (Docker: ibmcom/db2express-c). + */ +@Profile(DB2) +@Configuration +public class Db2DatabaseConfiguration extends DatabaseConfiguration { + + /** + * Create a new instance. + */ + public Db2DatabaseConfiguration() { + super("db2"); + } + + /** + * Creates the SQL variants for DB2. + * @param env The Spring environment for getting the configuration property values. + * @return SQL variants optimized for DB2. + */ + @Bean + public SQLVariants sqlVariants(Environment env) { + return new Db2SQLVariants(property(env, "timezone")); + } + + @Override + protected void checkDatabaseConfiguration(Environment env, DataSource dataSource) { + JdbcTemplate jdbc = new JdbcTemplate(dataSource); + Long dbTimeZoneOffsetHours = jdbc.queryForObject("select current timezone from sysibm.sysdummy1", Long.class); + Long propsTimeZoneOffsetHours = HOURS.convert( + TimeZone.getTimeZone(property(env, "timezone")).getOffset(currentTimeMillis()), MILLISECONDS); + if (!Objects.equals(dbTimeZoneOffsetHours, propsTimeZoneOffsetHours)) { + throw new RuntimeException("Database has unexpected time zone - hour offset in DB2 is " + dbTimeZoneOffsetHours + + " but the expected hour offset based on timezone-property is " + propsTimeZoneOffsetHours + + ". Change the timezone-property to match with your DB2 time zone."); + } + } + + /** + * SQL variants optimized for DB2. + */ + public static class Db2SQLVariants implements SQLVariants { + + private final ZoneId dbTimeZoneId; + + public Db2SQLVariants(String dbTimeZoneIdStr) { + dbTimeZoneId = ZoneId.of(dbTimeZoneIdStr); + } + + /** + * Returns SQL representing the current database time plus given amount of seconds. + */ + @Override + public String currentTimePlusSeconds(int seconds) { + return "current_timestamp + " + seconds + " SECONDS"; + } + + /** + * Returns false as DB2 does not support update returning clause. + */ + @Override + public boolean hasUpdateReturning() { + return false; + } + + /** + * Returns false as DB2 does not support updateable CTEs. 
+ */ + @Override + public boolean hasUpdateableCTE() { + return false; + } + + @Override + public String nextActivationUpdate() { + return "(case " // + + "when ? is null then null " // + + "when external_next_activation is null then ? " // + + "else least(?, external_next_activation) end)"; + } + + /** + * Returns the SQL representation for given workflow instance status. + */ + @Override + public String workflowStatus(WorkflowInstanceStatus status) { + return "'" + status.name() + "'"; + } + + /** + * Returns SQL representing the workflow instance status parameter. + */ + @Override + public String workflowStatus() { + return "?"; + } + + /** + * Returns SQL representing the action type parameter. + */ + @Override + public String actionType() { + return "?"; + } + + /** + * Returns string for casting value to text. + */ + @Override + public String castToText() { + return ""; + } + + /** + * Returns SQL for a query with a limit of results. + */ + @Override + public String limit(String query, long limit) { + // note: limit must be a number, because NamedJdbcTemplate does not set variables (e.g. :limit) here + return query + " fetch first " + limit + " rows only"; + } + + /** + * Returns the SQL type for long text. + */ + @Override + public int longTextType() { + return Types.VARCHAR; + } + + /** + * Returns true as DB2 Express-C supports batch updates. + */ + @Override + public boolean useBatchUpdate() { + return true; + } + + @Override + public Object getTimestamp(ResultSet rs, String columnName) throws SQLException { + return Optional.ofNullable(rs.getTimestamp(columnName)) + .map(ts -> new Timestamp(ts.getTime() + timeZoneMismatchInMillis())) + .orElse(null); + } + + @Override + public DateTime getDateTime(ResultSet rs, String columnName) throws SQLException { + return Optional.ofNullable(rs.getTimestamp(columnName)) + .map(ts -> new DateTime(ts.getTime() + timeZoneMismatchInMillis())) + .orElse(null); + } + + @Override + public void setDateTime(PreparedStatement ps, int columnNumber, DateTime timestamp) throws SQLException { + ps.setTimestamp(columnNumber, toTimestamp(timestamp), Calendar.getInstance(TimeZone.getTimeZone("UTC"))); + } + + @Override + public Object toTimestampObject(DateTime timestamp) { + return Optional.ofNullable(timestamp) + .map(ts -> new Timestamp(timestamp.getMillis() - timeZoneMismatchInMillis())) + .orElse(null); + } + + @Override + public Object tuneTimestampForDb(Object timestamp) { + return new Timestamp(((Timestamp)timestamp).getTime() - timeZoneMismatchInMillis()); + } + + private long timeZoneMismatchInMillis() { + long now = currentTimeMillis(); + return TimeZone.getDefault().getOffset(now) - TimeZone.getTimeZone(dbTimeZoneId).getOffset(now); + } + } +} diff --git a/nflow-engine/src/main/java/io/nflow/engine/config/db/H2DatabaseConfiguration.java b/nflow-engine/src/main/java/io/nflow/engine/config/db/H2DatabaseConfiguration.java index 1b4eb6fc3..102439ef6 100644 --- a/nflow-engine/src/main/java/io/nflow/engine/config/db/H2DatabaseConfiguration.java +++ b/nflow-engine/src/main/java/io/nflow/engine/config/db/H2DatabaseConfiguration.java @@ -144,7 +144,7 @@ public String castToText() { * Returns SQL for a query with a limit of results. 
*/ @Override - public String limit(String query, String limit) { + public String limit(String query, long limit) { return query + " limit " + limit; } diff --git a/nflow-engine/src/main/java/io/nflow/engine/config/db/MysqlDatabaseConfiguration.java b/nflow-engine/src/main/java/io/nflow/engine/config/db/MysqlDatabaseConfiguration.java index 6a7bd113a..7e849f576 100644 --- a/nflow-engine/src/main/java/io/nflow/engine/config/db/MysqlDatabaseConfiguration.java +++ b/nflow-engine/src/main/java/io/nflow/engine/config/db/MysqlDatabaseConfiguration.java @@ -160,7 +160,7 @@ public String castToText() { * Returns SQL for a query with a limit of results. */ @Override - public String limit(String query, String limit) { + public String limit(String query, long limit) { return query + " limit " + limit; } diff --git a/nflow-engine/src/main/java/io/nflow/engine/config/db/OracleDatabaseConfiguration.java b/nflow-engine/src/main/java/io/nflow/engine/config/db/OracleDatabaseConfiguration.java index cca452a9d..a52ca252e 100644 --- a/nflow-engine/src/main/java/io/nflow/engine/config/db/OracleDatabaseConfiguration.java +++ b/nflow-engine/src/main/java/io/nflow/engine/config/db/OracleDatabaseConfiguration.java @@ -160,7 +160,7 @@ public String castToText() { * Returns SQL for a query with a limit of results. */ @Override - public String limit(String query, String limit) { + public String limit(String query, long limit) { return "select * from (" + query + ") where rownum <= " + limit; } diff --git a/nflow-engine/src/main/java/io/nflow/engine/config/db/PgDatabaseConfiguration.java b/nflow-engine/src/main/java/io/nflow/engine/config/db/PgDatabaseConfiguration.java index 10b289bdd..619c8e7a3 100644 --- a/nflow-engine/src/main/java/io/nflow/engine/config/db/PgDatabaseConfiguration.java +++ b/nflow-engine/src/main/java/io/nflow/engine/config/db/PgDatabaseConfiguration.java @@ -110,7 +110,7 @@ public String castToText() { * Returns SQL for a query with a limit of results. */ @Override - public String limit(String query, String limit) { + public String limit(String query, long limit) { return query + " limit " + limit; } diff --git a/nflow-engine/src/main/java/io/nflow/engine/config/db/SqlServerDatabaseConfiguration.java b/nflow-engine/src/main/java/io/nflow/engine/config/db/SqlServerDatabaseConfiguration.java index 3a8735513..73aca0d99 100644 --- a/nflow-engine/src/main/java/io/nflow/engine/config/db/SqlServerDatabaseConfiguration.java +++ b/nflow-engine/src/main/java/io/nflow/engine/config/db/SqlServerDatabaseConfiguration.java @@ -190,7 +190,7 @@ public String castToText() { * Returns SQL for a query with a limit of results. 
*/ @Override - public String limit(String query, String limit) { + public String limit(String query, long limit) { int idx = query.indexOf("select "); return query.substring(0, idx + 7) + "top(" + limit + ") " + query.substring(idx + 7); } diff --git a/nflow-engine/src/main/java/io/nflow/engine/internal/dao/ArchiveDao.java b/nflow-engine/src/main/java/io/nflow/engine/internal/dao/ArchiveDao.java index 92fe65c80..e18b80845 100644 --- a/nflow-engine/src/main/java/io/nflow/engine/internal/dao/ArchiveDao.java +++ b/nflow-engine/src/main/java/io/nflow/engine/internal/dao/ArchiveDao.java @@ -58,7 +58,7 @@ public List listArchivableWorkflows(DateTime before, int maxRows) { " select 1 from nflow_workflow child where child.root_workflow_id = parent.id " + " and (" + sqlVariants.dateLtEqDiff("?", "child.modified") + " or child.next_activation is not null)" + " )" + - " order by modified asc ", String.valueOf(maxRows)) + + " order by modified asc ", maxRows) + ") as archivable_parent " + "where archivable_parent.id = w.id or archivable_parent.id = w.root_workflow_id", new ArchivableWorkflowsRowMapper(), sqlVariants.toTimestampObject(before), sqlVariants.toTimestampObject(before)); diff --git a/nflow-engine/src/main/java/io/nflow/engine/internal/dao/WorkflowInstanceDao.java b/nflow-engine/src/main/java/io/nflow/engine/internal/dao/WorkflowInstanceDao.java index efe896c19..8e01409a4 100644 --- a/nflow-engine/src/main/java/io/nflow/engine/internal/dao/WorkflowInstanceDao.java +++ b/nflow-engine/src/main/java/io/nflow/engine/internal/dao/WorkflowInstanceDao.java @@ -548,7 +548,7 @@ String whereConditionForInstanceUpdate() { private List pollNextWorkflowInstanceIdsWithUpdateReturning(int batchSize) { String sql = updateInstanceForExecutionQuery() + " where id in (" - + sqlVariants.limit("select id from nflow_workflow " + whereConditionForInstanceUpdate(), Integer.toString(batchSize)) + + sqlVariants.limit("select id from nflow_workflow " + whereConditionForInstanceUpdate(), batchSize) + ") and executor_id is null returning id"; return jdbc.queryForList(sql, Integer.class); } @@ -557,14 +557,9 @@ private List pollNextWorkflowInstanceIdsWithTransaction(final int batch return transaction.execute(new TransactionCallback>() { @Override public List doInTransaction(TransactionStatus transactionStatus) { - String sql = sqlVariants.limit("select id, modified from nflow_workflow " + whereConditionForInstanceUpdate(), - Integer.toString(batchSize)); - List instances = jdbc.query(sql, new RowMapper() { - @Override - public OptimisticLockKey mapRow(ResultSet rs, int rowNum) throws SQLException { - return new OptimisticLockKey(rs.getInt("id"), sqlVariants.getTimestamp(rs, "modified")); - } - }); + String sql = sqlVariants.limit("select id, modified from nflow_workflow " + whereConditionForInstanceUpdate(), batchSize); + List instances = jdbc.query(sql, (rs, rowNum) -> + new OptimisticLockKey(rs.getInt("id"), sqlVariants.getTimestamp(rs, "modified"))); if (instances.isEmpty()) { return emptyList(); } @@ -582,7 +577,7 @@ private void updateNextWorkflowInstancesWithMultipleUpdates(List instances, List ids) { List batchArgs = new ArrayList<>(instances.size()); for (OptimisticLockKey instance : instances) { - batchArgs.add(new Object[] { instance.id, instance.modified }); + batchArgs.add(new Object[] { instance.id, sqlVariants.tuneTimestampForDb(instance.modified) }); ids.add(instance.id); } int[] updateStatuses = jdbc @@ -686,8 +681,7 @@ public List queryWorkflowInstances(QueryWorkflowInstances quer 
conditions.add("w.executor_group = :executor_group"); params.addValue("executor_group", executorInfo.getExecutorGroup()); sql += " where " + collectionToDelimitedString(conditions, " and ") + " order by w.created desc"; - sql = sqlVariants.limit(sql, ":limit"); - params.addValue("limit", getMaxResults(query.maxResults)); + sql = sqlVariants.limit(sql, getMaxResults(query.maxResults)); List ret = namedJdbc.query(sql, params, new WorkflowInstanceRowMapper()).stream() .map(WorkflowInstance.Builder::build).collect(toList()); for (WorkflowInstance instance : ret) { @@ -712,11 +706,7 @@ private void fillChildWorkflowIds(final WorkflowInstance instance) { public void processRow(ResultSet rs) throws SQLException { int parentActionId = rs.getInt(1); int childWorkflowInstanceId = rs.getInt(2); - List children = instance.childWorkflows.get(parentActionId); - if (children == null) { - children = new ArrayList<>(); - instance.childWorkflows.put(parentActionId, children); - } + List children = instance.childWorkflows.computeIfAbsent(parentActionId, ArrayList::new); children.add(childWorkflowInstanceId); } }, instance.id); @@ -732,8 +722,8 @@ private long getMaxResults(Long maxResults) { private void fillActions(WorkflowInstance instance, boolean includeStateVariables, Long maxActions) { Map> actionStates = includeStateVariables ? fetchActionStateVariables(instance) : EMPTY_ACTION_STATE_MAP; - String limit = Long.toString(getMaxActions(maxActions)); - String sql = sqlVariants.limit("select * from nflow_workflow_action where workflow_id = ? order by id desc", limit); + String sql = sqlVariants.limit("select nflow_workflow_action.* from nflow_workflow_action where workflow_id = ? order by id desc", + getMaxActions(maxActions)); instance.actions.addAll(jdbc.query(sql, new WorkflowInstanceActionRowMapper(sqlVariants, actionStates), instance.id)); } diff --git a/nflow-engine/src/main/java/io/nflow/engine/internal/storage/db/SQLVariants.java b/nflow-engine/src/main/java/io/nflow/engine/internal/storage/db/SQLVariants.java index 86b744fee..93f8ab141 100644 --- a/nflow-engine/src/main/java/io/nflow/engine/internal/storage/db/SQLVariants.java +++ b/nflow-engine/src/main/java/io/nflow/engine/internal/storage/db/SQLVariants.java @@ -1,14 +1,13 @@ package io.nflow.engine.internal.storage.db; -import io.nflow.engine.workflow.instance.WorkflowInstance.WorkflowInstanceStatus; -import org.joda.time.DateTime; +import static io.nflow.engine.internal.dao.DaoUtil.toDateTime; +import static io.nflow.engine.internal.dao.DaoUtil.toTimestamp; +import io.nflow.engine.workflow.instance.WorkflowInstance.WorkflowInstanceStatus; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; - -import static io.nflow.engine.internal.dao.DaoUtil.toDateTime; -import static io.nflow.engine.internal.dao.DaoUtil.toTimestamp; +import org.joda.time.DateTime; public interface SQLVariants { String currentTimePlusSeconds(int seconds); @@ -27,7 +26,7 @@ public interface SQLVariants { String castToText(); - String limit(String query, String limit); + String limit(String query, long limit); int longTextType(); @@ -52,4 +51,8 @@ default void setDateTime(PreparedStatement ps, int columnNumber, DateTime timest default Object toTimestampObject(DateTime timestamp) { return toTimestamp(timestamp); } + + default Object tuneTimestampForDb(Object timestamp) { + return timestamp; + } } diff --git a/nflow-engine/src/main/resources/nflow-engine.properties b/nflow-engine/src/main/resources/nflow-engine.properties index 
c04c4da0e..477fdce0c 100644 --- a/nflow-engine/src/main/resources/nflow-engine.properties +++ b/nflow-engine/src/main/resources/nflow-engine.properties @@ -21,29 +21,35 @@ nflow.workflow.instance.query.max.actions.default=100 nflow.unknown.workflow.type.retry.delay.minutes=60 nflow.unknown.workflow.state.retry.delay.minutes=60 -nflow.db.h2.driver=org.h2.jdbcx.JdbcDataSource +nflow.db.h2.driver=org.h2.Driver nflow.db.h2.url=jdbc:h2:mem:test;TRACE_LEVEL_FILE=4 nflow.db.h2.user=sa nflow.db.h2.password= nflow.db.h2.tcp.port=8043 nflow.db.h2.console.port=8044 -nflow.db.mysql.driver=com.mysql.cj.jdbc.MysqlDataSource +nflow.db.mysql.driver=com.mysql.cj.jdbc.Driver nflow.db.mysql.url=jdbc:mysql://localhost/nflow nflow.db.mysql.user=nflow nflow.db.mysql.password=nflow -nflow.db.postgresql.driver=org.postgresql.ds.PGSimpleDataSource +nflow.db.postgresql.driver=org.postgresql.Driver nflow.db.postgresql.url=jdbc:postgresql://localhost/nflow nflow.db.postgresql.user=nflow nflow.db.postgresql.password=nflow -nflow.db.sqlserver.driver=com.microsoft.sqlserver.jdbc.SQLServerDataSource +nflow.db.sqlserver.driver=com.microsoft.sqlserver.jdbc.SQLServerDriver nflow.db.sqlserver.url=jdbc:sqlserver://localhost;databaseName=nflow nflow.db.sqlserver.user=nflow nflow.db.sqlserver.password=nFlow42% -nflow.db.oracle.driver=oracle.jdbc.pool.OracleDataSource +nflow.db.db2.driver=com.ibm.db2.jcc.DB2Driver +nflow.db.db2.url=jdbc:db2://localhost:50000/nflow +nflow.db.db2.user=db2inst1 +nflow.db.db2.password=nflow +nflow.db.db2.timezone=UTC + +nflow.db.oracle.driver=oracle.jdbc.driver.OracleDriver nflow.db.oracle.url=jdbc:oracle:thin:@//localhost:1521/XE nflow.db.oracle.user=nflow nflow.db.oracle.password=nflow diff --git a/nflow-engine/src/main/resources/scripts/db/db2.create.ddl.sql b/nflow-engine/src/main/resources/scripts/db/db2.create.ddl.sql new file mode 100644 index 000000000..c7e3f04a0 --- /dev/null +++ b/nflow-engine/src/main/resources/scripts/db/db2.create.ddl.sql @@ -0,0 +1,141 @@ +-- production tables + +create table nflow_workflow ( + id int primary key generated always as identity, + status varchar(32) not null check (status in ('created', 'executing', 'inProgress', 'finished', 'manual')), + type varchar(64) not null, + root_workflow_id integer default null, + parent_workflow_id integer default null, + parent_action_id integer default null, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamp(3), + external_next_activation timestamp(3), + executor_id int, + retries int not null default 0, + created timestamp(3) not null default current_timestamp, + modified timestamp(3) not null default current_timestamp, + executor_group varchar(64) not null, + workflow_signal int, + constraint nflow_workflow_uniq unique (type, external_id, executor_group) +); + +create or replace trigger nflow_workflow_update_modified + before update on nflow_workflow + referencing new as n + for each row + set modified = current timestamp; + +drop index nflow_workflow_activation; +create index nflow_workflow_activation on nflow_workflow(next_activation, modified); + +create table nflow_workflow_action ( + id int primary key generated always as identity, + workflow_id int not null, + executor_id int not null default -1, + type varchar(32) not null check (type in ('stateExecution', 'stateExecutionFailed', 'recovery', 'externalChange')), + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start 
timestamp(3) not null, + execution_end timestamp(3) not null, + foreign key (workflow_id) references nflow_workflow(id) on delete cascade, + constraint nflow_workflow_action_uniq unique (workflow_id, id) +); + +alter table nflow_workflow add constraint fk_workflow_parent + foreign key (parent_workflow_id, parent_action_id) references nflow_workflow_action (workflow_id, id) on delete cascade; + +alter table nflow_workflow add constraint fk_workflow_root + foreign key (root_workflow_id) references nflow_workflow (id) on delete cascade; + +create table nflow_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value varchar(10240) not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_workflow(id) on delete cascade +); + +create table nflow_executor ( + id int primary key generated always as identity, + host varchar(253) not null, + pid int not null, + executor_group varchar(64), + started timestamp(3) not null default current_timestamp, + active timestamp(3) not null, + expires timestamp(3) not null, + stopped timestamp(3) +); + +create table nflow_workflow_definition ( + type varchar(64) not null, + definition_sha1 varchar(40) not null, + definition varchar(10240) not null, + created timestamp(3) not null default current_timestamp, + modified timestamp(3) not null default current_timestamp, + modified_by int not null, + executor_group varchar(64) not null, + primary key (type, executor_group) +); + +create or replace trigger nflow_workflow_definition_update_modified + before update on nflow_workflow_definition + referencing new as n + for each row + set modified = current timestamp; + +-- Archive tables +-- - no default values +-- - no triggers +-- - no auto increments +-- - same indexes and constraints as production tables +-- - remove recursive foreign keys + +create table nflow_archive_workflow ( + id int not null primary key, + status varchar(32) not null, + type varchar(64) not null, + root_workflow_id integer, + parent_workflow_id integer, + parent_action_id integer, + business_key varchar(64), + external_id varchar(64) not null, + state varchar(64) not null, + state_text varchar(128), + next_activation timestamp(3), + external_next_activation timestamp(3), + executor_id int, + retries int not null default 0, + created timestamp(3) not null, + modified timestamp(3) not null, + executor_group varchar(64) not null, + workflow_signal int, + constraint nflow_archive_workflow_uniq unique (type, external_id, executor_group) +); + +create table nflow_archive_workflow_action ( + id int not null primary key, + workflow_id int not null, + executor_id int not null, + type varchar(32) not null, + state varchar(64) not null, + state_text varchar(128), + retry_no int not null, + execution_start timestamp(3) not null, + execution_end timestamp(3) not null, + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade, + constraint nflow_archive_workflow_action_uniq unique (workflow_id, id) +); + +create table nflow_archive_workflow_state ( + workflow_id int not null, + action_id int not null, + state_key varchar(64) not null, + state_value varchar(10240) not null, + primary key (workflow_id, action_id, state_key), + foreign key (workflow_id) references nflow_archive_workflow(id) on delete cascade +); diff --git a/nflow-jetty/pom.xml b/nflow-jetty/pom.xml index 842047c26..0b0f27c7b 100644 --- a/nflow-jetty/pom.xml +++ b/nflow-jetty/pom.xml @@ -171,5 +171,22 @@ + + db2 + 
+        <file>
+          <exists>${basedir}/../db2jcc4.jar</exists>
+        </file>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>com.ibm.db2</groupId>
+          <artifactId>db2jcc4</artifactId>
+          <version>4.24.92</version>
+          <scope>system</scope>
+          <systemPath>${basedir}/../db2jcc4.jar</systemPath>
+        </dependency>
+      </dependencies>
+    </profile>
diff --git a/nflow-netty/pom.xml b/nflow-netty/pom.xml
index f7b809f37..7c3278b94 100644
--- a/nflow-netty/pom.xml
+++ b/nflow-netty/pom.xml
@@ -76,4 +76,40 @@
       <scope>test</scope>
     </dependency>
   </dependencies>
+  <profiles>
+    <profile>
+      <id>oracle</id>
+      <activation>
+        <file>
+          <exists>${basedir}/../ojdbc7.jar</exists>
+        </file>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>com.oracle</groupId>
+          <artifactId>ojdbc7</artifactId>
+          <version>12.1.0.2.0</version>
+          <scope>system</scope>
+          <systemPath>${basedir}/../ojdbc7.jar</systemPath>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>db2</id>
+      <activation>
+        <file>
+          <exists>${basedir}/../db2jcc4.jar</exists>
+        </file>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>com.ibm.db2</groupId>
+          <artifactId>db2jcc4</artifactId>
+          <version>4.24.92</version>
+          <scope>system</scope>
+          <systemPath>${basedir}/../db2jcc4.jar</systemPath>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
diff --git a/nflow-tests/pom.xml b/nflow-tests/pom.xml
index 20094eaba..e5a943cbf 100644
--- a/nflow-tests/pom.xml
+++ b/nflow-tests/pom.xml
@@ -101,6 +101,23 @@
+    <profile>
+      <id>db2</id>
+      <activation>
+        <file>
+          <exists>${basedir}/../db2jcc4.jar</exists>
+        </file>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>com.ibm.db2</groupId>
+          <artifactId>db2jcc4</artifactId>
+          <version>4.24.92</version>
+          <scope>system</scope>
+          <systemPath>${basedir}/../db2jcc4.jar</systemPath>
+        </dependency>
+      </dependencies>
+    </profile>
diff --git a/nflow-tests/src/test/java/io/nflow/tests/PreviewCreditApplicationWorkflowTest.java b/nflow-tests/src/test/java/io/nflow/tests/PreviewCreditApplicationWorkflowTest.java
index d755f9ae5..051b89968 100644
--- a/nflow-tests/src/test/java/io/nflow/tests/PreviewCreditApplicationWorkflowTest.java
+++ b/nflow-tests/src/test/java/io/nflow/tests/PreviewCreditApplicationWorkflowTest.java
@@ -16,6 +16,7 @@
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.Status.Family;
 
+import org.joda.time.DateTime;
 import org.junit.ClassRule;
 import org.junit.FixMethodOrder;
 import org.junit.Test;
@@ -42,6 +43,7 @@ public PreviewCreditApplicationWorkflowTest() {
 
   private static CreateWorkflowInstanceRequest req;
   private static CreateWorkflowInstanceResponse resp;
+  private static DateTime wfModifiedAtAcceptCreditApplication;
 
   @Test
   public void t01_createCreditApplicationWorkflow() {
@@ -62,6 +64,7 @@ public void t02_checkAcceptCreditApplicationReached() throws InterruptedException {
     do {
       response = getWorkflowInstance(resp.id, "acceptCreditApplication");
     } while (response.nextActivation != null);
+    wfModifiedAtAcceptCreditApplication = response.modified;
     assertTrue(response.stateVariables.containsKey("info"));
   }
 
@@ -81,6 +84,7 @@ public void t04_checkDoneStateReached() throws InterruptedException {
     do {
       response = getWorkflowInstance(resp.id, "done");
     } while (response.nextActivation != null);
+    assertTrue("nflow_workflow.modified should be updated", response.modified.isAfter(wfModifiedAtAcceptCreditApplication));
   }
 
   @Test
diff --git a/travis/setup-db-db2.sh b/travis/setup-db-db2.sh
new file mode 100755
index 000000000..e48813866
--- /dev/null
+++ b/travis/setup-db-db2.sh
@@ -0,0 +1,9 @@
+#!/bin/bash -ev
+
+docker run --rm --name db2 --entrypoint /bin/sh ibmcom/db2express-c:latest -c "cat /home/db2inst1/sqllib/java/db2jcc4.jar" > db2jcc4.jar
+
+docker run --rm --name db2 -e 'DB2INST1_PASSWORD=nflow' -e 'LICENSE=accept' --publish 50000:50000 --detach ibmcom/db2express-c:latest db2start
+sleep 5
+
+docker exec -it db2 su - db2inst1 -c '/home/db2inst1/sqllib/bin/db2 -tvs "CREATE DATABASE nflow USING CODESET UTF-8 TERRITORY us;"'
+docker exec -it db2 su - db2inst1 -c '/home/db2inst1/sqllib/bin/db2 -tvs "ACTIVATE DATABASE nflow;"'
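The change set can be exercised end to end by enabling the new profile when embedding nFlow. Below is a minimal sketch, assuming the StartNflow helper from nflow-jetty and its startJetty(port, env, profiles) method; the wrapper class name, the port 7500 and the "local" environment name are illustrative only, and db2jcc4.jar plus the nflow.db.db2.* properties shown above must point to a reachable DB2 instance.

import io.nflow.engine.config.Profiles;
import io.nflow.jetty.StartNflow;

public class StartNflowWithDb2 {
  public static void main(String[] args) throws Exception {
    // Profiles.DB2 ("nflow.db.db2") is the Spring profile added in this change set;
    // it selects Db2DatabaseConfiguration and the nflow.db.db2.* connection properties.
    new StartNflow().startJetty(7500, "local", Profiles.DB2);
  }
}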