Skip to content

Commit

Permalink
Adding unit tests for BQ
Browse files Browse the repository at this point in the history
  • Loading branch information
Schuemie authored and Schuemie committed Oct 22, 2023
1 parent bd94697 commit baa049f
Show file tree
Hide file tree
Showing 3 changed files with 38 additions and 9 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/R_CMD_check_Hades.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,10 @@ jobs:
CDM5_SPARK_CONNECTION_STRING: ${{ secrets.CDM5_SPARK_CONNECTION_STRING }}
CDM5_SPARK_CDM_SCHEMA: ${{ secrets.CDM5_SPARK_CDM_SCHEMA }}
CDM5_SPARK_OHDSI_SCHEMA: ${{ secrets.CDM5_SPARK_OHDSI_SCHEMA }}
CDM_BIG_QUERY_CONNECTION_STRING: ${{ secrets.CDM_BIG_QUERY_CONNECTION_STRING }}
CDM_BIG_QUERY_KEY_FILE: ${{ secrets.CDM_BIG_QUERY_KEY_FILE }}
CDM_BIG_QUERY_CDM_SCHEMA: ${{ secrets.CDM_BIG_QUERY_CDM_SCHEMA }}
CDM_BIG_QUERY_OHDSI_SCHEMA: ${{ secrets.CDM_BIG_QUERY_OHDSI_SCHEMA }}

steps:
- uses: actions/checkout@v3
Expand Down
37 changes: 30 additions & 7 deletions tests/testthat/setup.R
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
library(DatabaseConnector)

# Download the JDBC drivers used in the tests ----------------------------------
if (Sys.getenv("DONT_DOWNLOAD_JDBC_DRIVERS", "") != "TRUE") {
oldJarFolder <- Sys.getenv("DATABASECONNECTOR_JAR_FOLDER")
Expand All @@ -10,6 +11,7 @@ if (Sys.getenv("DONT_DOWNLOAD_JDBC_DRIVERS", "") != "TRUE") {
downloadJdbcDrivers("redshift")
downloadJdbcDrivers("spark")
downloadJdbcDrivers("snowflake")
downloadJdbcDrivers("bigquery")

if (testthat::is_testing()) {
withr::defer({
Expand All @@ -21,6 +23,13 @@ if (Sys.getenv("DONT_DOWNLOAD_JDBC_DRIVERS", "") != "TRUE") {
}
}

# Helper functions -------------------------------------------------------------
# Append the DBMS name to a test label, e.g. "Insert data (postgresql)".
#
# Test section names are not shown in the output of R CMD check, so the
# labeled test is also printed to the console as a progress marker.
#
# Args:
#   label: Human-readable description of the test.
#   testServer: One entry of the `testServers` list; only
#     `testServer$connectionDetails$dbms` is read.
#
# Returns the label with the dbms appended in parentheses.
addDbmsToLabel <- function(label, testServer) {
  # Build the string once so the printed line and the returned label
  # cannot drift apart (the original duplicated the sprintf format):
  labelWithDbms <- sprintf("%s (%s)", label, testServer$connectionDetails$dbms)
  writeLines(paste("Test:", labelWithDbms))
  return(labelWithDbms)
}

# Create a list with testing server details ------------------------------
# Each entry appended below is a list holding the connection details for one
# DBMS plus the schema names to use on that server. Test files iterate over
# `testServers` to run every test against every configured database.
testServers <- list()

Expand Down Expand Up @@ -138,6 +147,27 @@ testServers[[length(testServers) + 1]] <- list(
tempEmulationSchema = Sys.getenv("CDM5_SPARK_OHDSI_SCHEMA")
)

# BigQuery
# The BigQuery JDBC driver authenticates via a service-account key file. The
# key content is kept in a CI secret / environment variable, so it is written
# to a temporary file whose path is spliced into the connection string.
bqKeyFile <- tempfile(fileext = ".json")
writeLines(Sys.getenv("CDM_BIG_QUERY_KEY_FILE"), bqKeyFile)
if (testthat::is_testing()) {
# Delete the key file when the test run tears down, so credentials do not
# linger in the temp directory:
withr::defer(unlink(bqKeyFile, force = TRUE), testthat::teardown_env())
}
# The secret connection string contains the literal placeholder
# "<keyfile path>"; replace it with the real temp-file path. Forward slashes
# (winslash = "/") keep the JDBC URL valid on Windows as well.
bqConnectionString <- gsub("<keyfile path>",
normalizePath(bqKeyFile, winslash = "/"),
Sys.getenv("CDM_BIG_QUERY_CONNECTION_STRING"))
testServers[[length(testServers) + 1]] <- list(
connectionDetails = details <- createConnectionDetails(
dbms = "bigquery",
user = "",
password = "",
# NOTE(review): `!!` looks like rlang injection — presumably
# createConnectionDetails captures its arguments lazily and `!!` forces
# bqConnectionString to be evaluated now, while the local variable still
# holds the substituted path. Confirm against the DatabaseConnector
# documentation; in plain base R evaluation this would be an error.
connectionString = !!bqConnectionString
),
# NOTE(review): unnamed NULL element — presumably a placeholder mirroring
# the structure of the other server entries in this file; confirm.
NULL,
cdmDatabaseSchema = Sys.getenv("CDM_BIG_QUERY_CDM_SCHEMA"),
tempEmulationSchema = Sys.getenv("CDM_BIG_QUERY_OHDSI_SCHEMA")
)

# SQLite
sqliteFile <- tempfile(fileext = ".sqlite")
if (testthat::is_testing()) {
Expand Down Expand Up @@ -233,10 +263,3 @@ testServers[[length(testServers) + 1]] <- list(
cdmDatabaseSchema = cdmDatabaseSchema,
tempEmulationSchema = NULL
)

addDbmsToLabel <- function(label, testServer) {
# Test sections are not shown in R check, so also printing them here:
writeLines(sprintf("Test: %s (%s)", label, testServer$connectionDetails$dbms))
return(sprintf("%s (%s)", label, testServer$connectionDetails$dbms))
}

6 changes: 4 additions & 2 deletions tests/testthat/test-insertTable.R
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,8 @@ data$big_ints[8] <- 3.3043e+10

for (testServer in testServers) {
test_that(addDbmsToLabel("Insert data", testServer), {
if (testServer$connectionDetails$dbms == "redshift") {
# Inserting on RedShift is slow (Without bulk upload), so
if (testServer$connectionDetails$dbms %in% c("redshift", "bigquery")) {
# Inserting on RedShift or BigQuery is slow (Without bulk upload), so
# taking subset:
dataCopy1 <- data[1:10, ]
} else {
Expand Down Expand Up @@ -94,6 +94,8 @@ for (testServer in testServers) {
expect_equal(as.character(columnInfo$field.type), c("DATE", "TIMESTAMPNTZ", "NUMBER", "DOUBLE", "VARCHAR", "NUMBER"))
} else if (dbms == "spark") {
expect_equal(as.character(columnInfo$field.type), c("DATE", "TIMESTAMP", "INT", "FLOAT", "STRING", "BIGINT"))
} else if (dbms == "bigquery") {
expect_equal(as.character(columnInfo$field.type), c("DATE", "DATETIME", "INT64", "FLOAT64", "STRING", "INT64"))
} else {
warning("Unable to check column types for ", dbms)
}
Expand Down

0 comments on commit baa049f

Please sign in to comment.