Skip to content

Commit

Permalink
Fix to handle version-specific integration tests
Browse files Browse the repository at this point in the history
  • Loading branch information
jnmt committed Aug 25, 2023
1 parent 2041ae1 commit 0782ebe
Show file tree
Hide file tree
Showing 7 changed files with 173 additions and 67 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yaml
Expand Up @@ -240,7 +240,7 @@ jobs:
- name: Setup and execute Gradle 'integrationTestJdbc' task
uses: gradle/gradle-build-action@v2
with:
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres -Dscalardb.jdbc.database.version.major=14

- name: Upload Gradle test reports
if: always()
Expand Down Expand Up @@ -284,7 +284,7 @@ jobs:
uses: gradle/gradle-build-action@v2
# Addressing the Oracle container with "@localhost" in the JDBC URL causes the JDBC connection to fail for an unknown reason. As a workaround, using the container's IP address instead works.
with:
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:oracle:thin:@${{ env.oracle_db_ip }}:1521/XEPDB1 -Dscalardb.jdbc.username=SYSTEM -Dscalardb.jdbc.password=Oracle21
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:oracle:thin:@${{ env.oracle_db_ip }}:1521/XEPDB1 -Dscalardb.jdbc.username=SYSTEM -Dscalardb.jdbc.password=Oracle21 -Dscalardb.jdbc.database.version.major=21

- name: Upload Gradle test reports
if: always()
Expand Down
12 changes: 6 additions & 6 deletions .github/workflows/supported_storages_compatibility_check.yaml
Expand Up @@ -255,7 +255,7 @@ jobs:
- name: Setup and execute Gradle 'integrationTestJdbc' task
uses: gradle/gradle-build-action@v2
with:
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres -Dscalardb.jdbc.database.version.major=12

- name: Upload Gradle test reports
if: always()
Expand Down Expand Up @@ -288,7 +288,7 @@ jobs:
- name: Setup and execute Gradle 'integrationTestJdbc' task
uses: gradle/gradle-build-action@v2
with:
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres -Dscalardb.jdbc.database.version.major=13

- name: Upload Gradle test reports
if: always()
Expand Down Expand Up @@ -321,7 +321,7 @@ jobs:
- name: Setup and execute Gradle 'integrationTestJdbc' task
uses: gradle/gradle-build-action@v2
with:
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres -Dscalardb.jdbc.database.version.major=14

- name: Upload Gradle test reports
if: always()
Expand Down Expand Up @@ -354,7 +354,7 @@ jobs:
- name: Setup and execute Gradle 'integrationTestJdbc' task
uses: gradle/gradle-build-action@v2
with:
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:postgresql://localhost:5432/ -Dscalardb.jdbc.username=postgres -Dscalardb.jdbc.password=postgres -Dscalardb.jdbc.database.version.major=15

- name: Upload Gradle test reports
if: always()
Expand Down Expand Up @@ -398,7 +398,7 @@ jobs:
uses: gradle/gradle-build-action@v2
# Addressing the Oracle container with "@localhost" in the JDBC URL causes the JDBC connection to fail for an unknown reason. As a workaround, using the container's IP address instead works.
with:
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:oracle:thin:@${{ env.oracle_db_ip }}:1521/XEPDB1 -Dscalardb.jdbc.username=SYSTEM -Dscalardb.jdbc.password=Oracle
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:oracle:thin:@${{ env.oracle_db_ip }}:1521/XEPDB1 -Dscalardb.jdbc.username=SYSTEM -Dscalardb.jdbc.password=Oracle -Dscalardb.jdbc.database.version.major=18

- name: Upload Gradle test reports
if: always()
Expand Down Expand Up @@ -440,7 +440,7 @@ jobs:
uses: gradle/gradle-build-action@v2
# Addressing the Oracle container with "@localhost" in the JDBC URL causes the JDBC connection to fail for an unknown reason. As a workaround, using the container's IP address instead works.
with:
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:oracle:thin:@${{ env.oracle_db_ip }}:1521/FREEPDB1 -Dscalardb.jdbc.username=SYSTEM -Dscalardb.jdbc.password=Oracle
arguments: integrationTestJdbc -Dscalardb.jdbc.url=jdbc:oracle:thin:@${{ env.oracle_db_ip }}:1521/FREEPDB1 -Dscalardb.jdbc.username=SYSTEM -Dscalardb.jdbc.password=Oracle -Dscalardb.jdbc.database.version.major=23

- name: Upload Gradle test reports
if: always()
Expand Down
Expand Up @@ -25,7 +25,8 @@ protected Properties getProps(String testName) {
/**
 * Creates tables with all data types directly in the underlying database so that import
 * integration tests can run against pre-existing tables.
 *
 * <p>The database major version is passed through so that version-specific data types can be
 * included or excluded by the test utility.
 *
 * @return a map from table name to its expected {@link TableMetadata}; a {@code null} value
 *     marks a table that is expected to be non-importable
 * @throws SQLException if creating the tables fails
 */
@Override
protected Map<String, TableMetadata> createExistingDatabaseWithAllDataTypes()
    throws SQLException {
  // Fixed: the merged diff left a stale duplicate call without the version argument here,
  // which made the second return unreachable. Only the version-aware call is kept.
  return testUtils.createExistingDatabaseWithAllDataTypes(
      JdbcEnv.getJdbcDatabaseMajorVersion(), getNamespace());
}

@Override
Expand Down
Expand Up @@ -25,7 +25,8 @@ protected Properties getProperties(String testName) {
/**
 * Creates tables with all data types directly in the underlying database so that import
 * integration tests can run against pre-existing tables.
 *
 * <p>The database major version is passed through so that version-specific data types can be
 * included or excluded by the test utility.
 *
 * @return a map from table name to its expected {@link TableMetadata}; a {@code null} value
 *     marks a table that is expected to be non-importable
 * @throws SQLException if creating the tables fails
 */
@Override
protected Map<String, TableMetadata> createExistingDatabaseWithAllDataTypes()
    throws SQLException {
  // Fixed: the merged diff left a stale duplicate call without the version argument here,
  // which made the second return unreachable. Only the version-aware call is kept.
  return testUtils.createExistingDatabaseWithAllDataTypes(
      JdbcEnv.getJdbcDatabaseMajorVersion(), getNamespace());
}

@Override
Expand Down
Expand Up @@ -15,9 +15,11 @@
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.dbcp2.BasicDataSource;

public class JdbcAdminImportTestUtils {
static final String SUPPORTED_TABLE_NAME = "supported_table";
static final List<String> UNSUPPORTED_DATA_TYPES_MYSQL =
Arrays.asList(
"BIGINT UNSIGNED",
Expand Down Expand Up @@ -54,7 +56,6 @@ public class JdbcAdminImportTestUtils {
"numeric(8,2)",
"path",
"pg_lsn",
"pg_snapshot", // after v14
"point",
"polygon",
"serial",
Expand All @@ -68,6 +69,8 @@ public class JdbcAdminImportTestUtils {
"txid_snapshot",
"uuid",
"xml");
static final List<String> UNSUPPORTED_DATA_TYPES_PGSQL_V13_OR_LATER =
Collections.singletonList("pg_snapshot");
static final List<String> UNSUPPORTED_DATA_TYPES_ORACLE =
Arrays.asList(
"BFILE",
Expand All @@ -76,13 +79,14 @@ public class JdbcAdminImportTestUtils {
"INT",
"INTERVAL YEAR(3) TO MONTH",
"INTERVAL DAY(2) TO SECOND",
"JSON",
"NUMBER(16,0)",
"ROWID",
"TIMESTAMP",
"TIMESTAMP WITH TIME ZONE",
"TIMESTAMP WITH LOCAL TIME ZONE",
"UROWID");
static final List<String> UNSUPPORTED_DATA_TYPES_ORACLE_V20_OR_LATER =
Collections.singletonList("JSON");
static final List<String> UNSUPPORTED_DATA_TYPES_MSSQL =
Arrays.asList(
"date",
Expand All @@ -109,67 +113,19 @@ public JdbcAdminImportTestUtils(Properties properties) {
rdbEngine = RdbEngineFactory.create(config);
}

public Map<String, TableMetadata> createExistingDatabaseWithAllDataTypes(String namespace)
throws SQLException {
Map<String, TableMetadata> results = new HashMap<>();
List<String> sqls = new ArrayList<>();
LinkedHashMap<String, String> goodTableColumns;
TableMetadata goodTableMetadata;
Map<String, String> badTables;
public Map<String, TableMetadata> createExistingDatabaseWithAllDataTypes(
int majorVersion, String namespace) throws SQLException {
if (rdbEngine instanceof RdbEngineMysql) {
goodTableColumns = prepareColumnsForMysql();
goodTableMetadata = prepareTableMetadataForMysql();
if (JdbcEnv.isMariaDB()) {
badTables =
prepareCreateNonImportableTableSql(
namespace,
UNSUPPORTED_DATA_TYPES_MYSQL.stream()
.filter(type -> !type.equalsIgnoreCase("JSON"))
.collect(Collectors.toList()));
} else {
badTables = prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_MYSQL);
}
return createExistingMysqlDatabaseWithAllDataTypes(namespace);
} else if (rdbEngine instanceof RdbEnginePostgresql) {
goodTableColumns = prepareColumnsForPostgresql();
goodTableMetadata = prepareTableMetadataForPostgresql();
badTables = prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_PGSQL);
return createExistingPostgresDatabaseWithAllDataTypes(majorVersion, namespace);
} else if (rdbEngine instanceof RdbEngineOracle) {
goodTableColumns = prepareColumnsForOracle();
goodTableMetadata = prepareTableMetadataForOracle();
badTables = prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_ORACLE);

// LONG columns must be tested with separated tables since they cannot be coexisted
TableMetadata longRawMetadata = prepareTableMetadataForOracleForLongRaw();
sqls.add(
prepareCreateTableSql(
namespace,
"good_table_long_raw",
prepareColumnsForOracleLongRaw(),
longRawMetadata.getPartitionKeyNames()));
results.put("good_table_long_raw", longRawMetadata);
return createExistingOracleDatabaseWithAllDataTypes(majorVersion, namespace);
} else if (rdbEngine instanceof RdbEngineSqlServer) {
goodTableColumns = prepareColumnsForSqlServer();
goodTableMetadata = prepareTableMetadataForSqlServer();
badTables = prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_MSSQL);
return createExistingSqlServerDatabaseWithAllDataTypes(namespace);
} else {
throw new RuntimeException();
}

// table with all supported columns
sqls.add(
prepareCreateTableSql(
namespace, "good_table", goodTableColumns, goodTableMetadata.getPartitionKeyNames()));
results.put("good_table", goodTableMetadata);

// tables with an unsupported column
badTables.forEach(
(table, sql) -> {
sqls.add(sql);
results.put(table, null);
});

execute(sqls.toArray(new String[0]));
return results;
}

public void dropTable(String namespace, String table) throws SQLException {
Expand Down Expand Up @@ -435,4 +391,146 @@ private String prepareCreateTableSql(
+ primaryKeys.stream().map(rdbEngine::enclose).collect(Collectors.joining(","))
+ "))";
}

/**
 * Creates the MySQL/MariaDB test tables: one table with all importable data types plus one
 * table per non-importable data type.
 *
 * @param namespace namespace in which the tables are created
 * @return a map from table name to expected metadata ({@code null} for non-importable tables)
 * @throws SQLException if table creation fails
 */
private Map<String, TableMetadata> createExistingMysqlDatabaseWithAllDataTypes(String namespace)
    throws SQLException {
  // Table covering all importable MySQL data types.
  TableMetadata metadata = prepareTableMetadataForMysql();
  String createTableSql =
      prepareCreateTableSql(
          namespace,
          SUPPORTED_TABLE_NAME,
          prepareColumnsForMysql(),
          metadata.getPartitionKeyNames());

  // On MariaDB, JSON is excluded from the non-importable list (it is not treated as an
  // unsupported type there).
  List<String> unsupportedTypes =
      JdbcEnv.isMariaDB()
          ? UNSUPPORTED_DATA_TYPES_MYSQL.stream()
              .filter(type -> !"JSON".equalsIgnoreCase(type))
              .collect(Collectors.toList())
          : UNSUPPORTED_DATA_TYPES_MYSQL;
  Map<String, String> nonImportableTables =
      prepareCreateNonImportableTableSql(namespace, unsupportedTypes);

  return executeCreateTableSql(
      Collections.singletonMap(SUPPORTED_TABLE_NAME, createTableSql),
      Collections.singletonMap(SUPPORTED_TABLE_NAME, metadata),
      nonImportableTables);
}

/**
 * Creates the PostgreSQL test tables: one table with all importable data types plus one table
 * per non-importable data type, taking the server major version into account.
 *
 * @param majorVersion major version of the PostgreSQL server
 * @param namespace namespace in which the tables are created
 * @return a map from table name to expected metadata ({@code null} for non-importable tables)
 * @throws SQLException if table creation fails
 */
private Map<String, TableMetadata> createExistingPostgresDatabaseWithAllDataTypes(
    int majorVersion, String namespace) throws SQLException {
  // Table covering all importable PostgreSQL data types.
  TableMetadata metadata = prepareTableMetadataForPostgresql();
  String createTableSql =
      prepareCreateTableSql(
          namespace,
          SUPPORTED_TABLE_NAME,
          prepareColumnsForPostgresql(),
          metadata.getPartitionKeyNames());

  // The extra types are only exercised on majorVersion >= 14.
  // NOTE(review): the constant is named ..._V13_OR_LATER but the guard is >= 14 — confirm the
  // intended version cutoff.
  List<String> unsupportedTypes = new ArrayList<>(UNSUPPORTED_DATA_TYPES_PGSQL);
  if (majorVersion >= 14) {
    unsupportedTypes.addAll(UNSUPPORTED_DATA_TYPES_PGSQL_V13_OR_LATER);
  }
  Map<String, String> nonImportableTables =
      prepareCreateNonImportableTableSql(namespace, unsupportedTypes);

  return executeCreateTableSql(
      Collections.singletonMap(SUPPORTED_TABLE_NAME, createTableSql),
      Collections.singletonMap(SUPPORTED_TABLE_NAME, metadata),
      nonImportableTables);
}

/**
 * Creates the Oracle test tables: two tables with importable data types (LONG RAW columns are
 * kept in their own table) plus one table per non-importable data type, taking the server major
 * version into account.
 *
 * @param majorVersion major version of the Oracle server
 * @param namespace namespace in which the tables are created
 * @return a map from table name to expected metadata ({@code null} for non-importable tables)
 * @throws SQLException if table creation fails
 */
private Map<String, TableMetadata> createExistingOracleDatabaseWithAllDataTypes(
    int majorVersion, String namespace) throws SQLException {
  Map<String, String> importableTables = new HashMap<>();
  Map<String, TableMetadata> importableTableMetadata = new HashMap<>();

  // Table covering the importable Oracle data types.
  TableMetadata metadata = prepareTableMetadataForOracle();
  importableTables.put(
      SUPPORTED_TABLE_NAME,
      prepareCreateTableSql(
          namespace,
          SUPPORTED_TABLE_NAME,
          prepareColumnsForOracle(),
          metadata.getPartitionKeyNames()));
  importableTableMetadata.put(SUPPORTED_TABLE_NAME, metadata);

  // LONG columns cannot coexist with the other columns, so they are tested in a separate table.
  String longRawTableName = SUPPORTED_TABLE_NAME + "_long_raw";
  TableMetadata longRawMetadata = prepareTableMetadataForOracleForLongRaw();
  importableTables.put(
      longRawTableName,
      prepareCreateTableSql(
          namespace,
          longRawTableName,
          prepareColumnsForOracleLongRaw(),
          longRawMetadata.getPartitionKeyNames()));
  importableTableMetadata.put(longRawTableName, longRawMetadata);

  // The extra types (JSON) are only exercised on majorVersion >= 20.
  List<String> unsupportedTypes = new ArrayList<>(UNSUPPORTED_DATA_TYPES_ORACLE);
  if (majorVersion >= 20) {
    unsupportedTypes.addAll(UNSUPPORTED_DATA_TYPES_ORACLE_V20_OR_LATER);
  }
  Map<String, String> nonImportableTables =
      prepareCreateNonImportableTableSql(namespace, unsupportedTypes);

  return executeCreateTableSql(importableTables, importableTableMetadata, nonImportableTables);
}

/**
 * Creates the SQL Server test tables: one table with all importable data types plus one table
 * per non-importable data type.
 *
 * @param namespace namespace in which the tables are created
 * @return a map from table name to expected metadata ({@code null} for non-importable tables)
 * @throws SQLException if table creation fails
 */
private Map<String, TableMetadata> createExistingSqlServerDatabaseWithAllDataTypes(
    String namespace) throws SQLException {
  // Table covering all importable SQL Server data types.
  TableMetadata metadata = prepareTableMetadataForSqlServer();
  String createTableSql =
      prepareCreateTableSql(
          namespace,
          SUPPORTED_TABLE_NAME,
          prepareColumnsForSqlServer(),
          metadata.getPartitionKeyNames());

  // One table per unsupported data type; each is expected to be rejected on import.
  Map<String, String> nonImportableTables =
      prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_MSSQL);

  return executeCreateTableSql(
      Collections.singletonMap(SUPPORTED_TABLE_NAME, createTableSql),
      Collections.singletonMap(SUPPORTED_TABLE_NAME, metadata),
      nonImportableTables);
}

/**
 * Executes the given CREATE TABLE statements and builds the expected-result map.
 *
 * @param supportedTables map from importable table name to its CREATE TABLE statement
 * @param supportedTableMetadata map from importable table name to its expected metadata
 * @param unsupportedTables map from non-importable table name to its CREATE TABLE statement
 * @return a map from table name to expected metadata; non-importable tables map to {@code null}
 * @throws SQLException if executing any statement fails
 */
private Map<String, TableMetadata> executeCreateTableSql(
    Map<String, String> supportedTables,
    Map<String, TableMetadata> supportedTableMetadata,
    Map<String, String> unsupportedTables)
    throws SQLException {
  Map<String, TableMetadata> expected = new HashMap<>();
  List<String> ddlStatements = new ArrayList<>();

  // Tables with all supported columns: record the metadata the import is expected to produce.
  for (Map.Entry<String, String> entry : supportedTables.entrySet()) {
    ddlStatements.add(entry.getValue());
    expected.put(entry.getKey(), supportedTableMetadata.get(entry.getKey()));
  }

  // Tables with an unsupported column: a null value marks them as expected to fail on import.
  for (Map.Entry<String, String> entry : unsupportedTables.entrySet()) {
    ddlStatements.add(entry.getValue());
    expected.put(entry.getKey(), null);
  }

  execute(ddlStatements.toArray(new String[0]));
  return expected;
}
}
Expand Up @@ -8,6 +8,7 @@ public final class JdbcEnv {
private static final String PROP_JDBC_USERNAME = "scalardb.jdbc.username";
private static final String PROP_JDBC_PASSWORD = "scalardb.jdbc.password";
private static final String PROP_JDBC_MARIADB = "scalardb.jdbc.mariadb";
private static final String PROP_JDBC_DB_MAJOR_VERSION = "scalardb.jdbc.database.version.major";

private static final String DEFAULT_JDBC_URL = "jdbc:mysql://localhost:3306/";
private static final String DEFAULT_JDBC_USERNAME = "root";
Expand Down Expand Up @@ -43,4 +44,8 @@ public static boolean isSqlite() {
public static boolean isMariaDB() {
return Boolean.getBoolean(PROP_JDBC_MARIADB);
}

/**
 * Returns the database major version given via the {@code scalardb.jdbc.database.version.major}
 * system property.
 *
 * @return the configured major version, or {@link Integer#MAX_VALUE} when the property is not
 *     set (i.e. the database is treated as the newest version)
 */
public static int getJdbcDatabaseMajorVersion() {
  Integer majorVersion = Integer.getInteger(PROP_JDBC_DB_MAJOR_VERSION);
  return majorVersion == null ? Integer.MAX_VALUE : majorVersion;
}
}
Expand Up @@ -30,7 +30,8 @@ protected Properties getProperties(String testName) {
/**
 * Creates tables with all data types directly in the underlying database so that import
 * integration tests can run against pre-existing tables.
 *
 * <p>The database major version is passed through so that version-specific data types can be
 * included or excluded by the test utility.
 *
 * @return a map from table name to its expected {@link TableMetadata}; a {@code null} value
 *     marks a table that is expected to be non-importable
 * @throws SQLException if creating the tables fails
 */
@Override
protected Map<String, TableMetadata> createExistingDatabaseWithAllDataTypes()
    throws SQLException {
  // Fixed: the merged diff left a stale duplicate call without the version argument here,
  // which made the second return unreachable. Only the version-aware call is kept.
  return testUtils.createExistingDatabaseWithAllDataTypes(
      JdbcEnv.getJdbcDatabaseMajorVersion(), getNamespace());
}

@Override
Expand Down

0 comments on commit 0782ebe

Please sign in to comment.