From 7a6bcdbecf7785f98c9c2ae171e66f77afd3b554 Mon Sep 17 00:00:00 2001 From: Marcus Codrescu Date: Wed, 22 Mar 2023 08:30:05 +0000 Subject: [PATCH] version 0.1.2 --- DESCRIPTION | 21 + LICENSE | 2 + MD5 | 27 ++ NAMESPACE | 51 ++ R/duckdb_connection.R | 190 ++++++++ R/get_database_functions.R | 102 ++++ R/list_drivers.R | 15 + R/mysql_connection.R | 215 +++++++++ R/postgres_connection.R | 213 +++++++++ R/snowflake_connection.R | 224 +++++++++ R/sqlite_connection.R | 200 ++++++++ R/submit_query.R | 47 ++ R/table_modal_w_download.R | 86 ++++ R/vertica_connection.R | 197 ++++++++ R/view_database.R | 513 +++++++++++++++++++++ README.md | 71 +++ man/list_drivers.Rd | 14 + man/view_database.Rd | 20 + tests/testthat.R | 12 + tests/testthat/test-duckdb_connection.R | 179 +++++++ tests/testthat/test-list_drivers.R | 16 + tests/testthat/test-mysql_connection.R | 206 +++++++++ tests/testthat/test-postgres_connection.R | 206 +++++++++ tests/testthat/test-snowflake_connection.R | 213 +++++++++ tests/testthat/test-sqlite_connection.R | 183 ++++++++ tests/testthat/test-submit_query.R | 148 ++++++ tests/testthat/test-vertica_connection.R | 216 +++++++++ tests/testthat/test-view_database.R | 21 + 28 files changed, 3608 insertions(+) create mode 100644 DESCRIPTION create mode 100644 LICENSE create mode 100644 MD5 create mode 100644 NAMESPACE create mode 100644 R/duckdb_connection.R create mode 100644 R/get_database_functions.R create mode 100644 R/list_drivers.R create mode 100644 R/mysql_connection.R create mode 100644 R/postgres_connection.R create mode 100644 R/snowflake_connection.R create mode 100644 R/sqlite_connection.R create mode 100644 R/submit_query.R create mode 100644 R/table_modal_w_download.R create mode 100644 R/vertica_connection.R create mode 100644 R/view_database.R create mode 100644 README.md create mode 100644 man/list_drivers.Rd create mode 100644 man/view_database.Rd create mode 100644 tests/testthat.R create mode 100644 tests/testthat/test-duckdb_connection.R create mode 100644 tests/testthat/test-list_drivers.R create mode 100644 tests/testthat/test-mysql_connection.R create mode 100644 tests/testthat/test-postgres_connection.R create mode 100644 tests/testthat/test-snowflake_connection.R create mode 100644 tests/testthat/test-sqlite_connection.R create mode 100644 tests/testthat/test-submit_query.R create mode 100644 tests/testthat/test-vertica_connection.R create mode 100644 tests/testthat/test-view_database.R diff --git a/DESCRIPTION b/DESCRIPTION new file mode 100644 index 0000000..5d7ccc8 --- /dev/null +++ b/DESCRIPTION @@ -0,0 +1,21 @@ +Package: octopus +Type: Package +Title: A Database Management Tool +Version: 0.1.2 +Authors@R: person("Marcus", "Codrescu", , "m.codrescu@outlook.com", role = c("aut", "cre")) +Maintainer: Marcus Codrescu +Description: A database management tool built as a 'shiny' application. Connect to various + databases to send queries, upload files, preview tables, and more. 
+License: MIT + file LICENSE +Encoding: UTF-8 +Suggests: duckdb, keyring, odbc, readr, RMySQL, RPostgres, RSQLite, + testthat (>= 3.0.0) +Config/testthat/edition: 3 +Imports: bslib, DBI, dplyr, DT, glue, httr, janitor, rio, shiny, + shinyAce, shinyjs, utils +RoxygenNote: 7.2.1 +NeedsCompilation: no +Packaged: 2023-03-22 02:20:22 UTC; mc678p +Author: Marcus Codrescu [aut, cre] +Repository: CRAN +Date/Publication: 2023-03-22 09:30:05 UTC diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..ff8b0ca --- /dev/null +++ b/LICENSE @@ -0,0 +1,2 @@ +YEAR: 2023 +COPYRIGHT HOLDER: octopus authors diff --git a/MD5 b/MD5 new file mode 100644 index 0000000..ab6992c --- /dev/null +++ b/MD5 @@ -0,0 +1,27 @@ +469d536bb115f26b9f3e2706d4e3fad0 *DESCRIPTION +3e1863b11eeb4064746c5621d02ee3b5 *LICENSE +05ecbebff9cf3b09dfc7371aab85c228 *NAMESPACE +5881f8ceddd4f99c01c3c917ea6cd800 *R/duckdb_connection.R +367fb27250d764a6e51c96d8be95d20a *R/get_database_functions.R +86c2e907d985204877e1bf0a2daca740 *R/list_drivers.R +bdc8850a82cbb2e519eb89a15079378d *R/mysql_connection.R +da906db3adb62f15c7387a18394c2ae2 *R/postgres_connection.R +7e57d5b94129cc1d5a661c860f894817 *R/snowflake_connection.R +3f599c544cd113f9096a4df584a7c18d *R/sqlite_connection.R +3d294a35010d3b37b6e48bad79e18556 *R/submit_query.R +0f591c8e2e4f4a3a1ac35ba56d7e3513 *R/table_modal_w_download.R +a49d8894e75861df9f6b99a87366910c *R/vertica_connection.R +2fc0c509049f76366941bfad4b75e928 *R/view_database.R +fbe4ac7e45949197fdb90c7b3c116472 *README.md +d1474ddceea47e984eecb41867997494 *man/list_drivers.Rd +8ff237823b0797721426490f227c8f17 *man/view_database.Rd +2cdc915fcce6cb2afcfe9c42c75118a3 *tests/testthat.R +9162a50b22f38c596c31497507b38964 *tests/testthat/test-duckdb_connection.R +7f792314138b019f389572c8bb5a22be *tests/testthat/test-list_drivers.R +737544f33d7b91ab0066ea4a42857d68 *tests/testthat/test-mysql_connection.R +f1bcd36e3767639c2c8df4f36f86b6c8 *tests/testthat/test-postgres_connection.R +d7e2abfde199f8cefac0ea80fa64df99 *tests/testthat/test-snowflake_connection.R +0b1c94c23d404b1aea8e48db1314ffdd *tests/testthat/test-sqlite_connection.R +aba724e504ec003d87146be2a73b0119 *tests/testthat/test-submit_query.R +fe9ca6b58d7265c7a5036adc340db94b *tests/testthat/test-vertica_connection.R +dc8aaf850254b9301cc85585aeab0b61 *tests/testthat/test-view_database.R diff --git a/NAMESPACE b/NAMESPACE new file mode 100644 index 0000000..fcb56ea --- /dev/null +++ b/NAMESPACE @@ -0,0 +1,51 @@ +# Generated by roxygen2: do not edit by hand + +export(list_drivers) +export(view_database) +importFrom(DBI,Id) +importFrom(DBI,dbClearResult) +importFrom(DBI,dbGetQuery) +importFrom(DBI,dbListTables) +importFrom(DBI,dbSendQuery) +importFrom(DBI,dbWriteTable) +importFrom(DT,renderDataTable) +importFrom(bslib,bs_theme) +importFrom(dplyr,pull) +importFrom(glue,glue) +importFrom(httr,GET) +importFrom(httr,content) +importFrom(httr,use_proxy) +importFrom(janitor,clean_names) +importFrom(rio,import) +importFrom(shiny,NS) +importFrom(shiny,bootstrapPage) +importFrom(shiny,div) +importFrom(shiny,downloadButton) +importFrom(shiny,downloadHandler) +importFrom(shiny,fileInput) +importFrom(shiny,h3) +importFrom(shiny,modalButton) +importFrom(shiny,modalDialog) +importFrom(shiny,moduleServer) +importFrom(shiny,observeEvent) +importFrom(shiny,p) +importFrom(shiny,removeModal) +importFrom(shiny,req) +importFrom(shiny,selectInput) +importFrom(shiny,shinyApp) +importFrom(shiny,showModal) +importFrom(shiny,showNotification) +importFrom(shiny,stopApp) +importFrom(shiny,tagList) 
+importFrom(shiny,tags) +importFrom(shiny,updateSelectInput) +importFrom(shiny,updateSelectizeInput) +importFrom(shinyAce,aceEditor) +importFrom(shinyAce,updateAceEditor) +importFrom(shinyjs,hideElement) +importFrom(shinyjs,html) +importFrom(shinyjs,onclick) +importFrom(shinyjs,onevent) +importFrom(shinyjs,showElement) +importFrom(shinyjs,useShinyjs) +importFrom(utils,write.csv) diff --git a/R/duckdb_connection.R b/R/duckdb_connection.R new file mode 100644 index 0000000..5b69537 --- /dev/null +++ b/R/duckdb_connection.R @@ -0,0 +1,190 @@ +#' A Database Specific Function To Retrieve Database Schemas +#' @noRd +#' +#' @param con A database connection object. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom dplyr pull +#' +#' @return A character vector of all schemas in the database. +get_schemas_duckdb <- function(con) { + + DBI::dbGetQuery( + con, + " + SELECT DISTINCT schema_name + FROM information_schema.schemata + " + ) |> + dplyr::pull(1) +} + +#' A Database Specific Function to Retrieve All Database Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A +#' +#' @importFrom DBI dbListTables +#' +#' @return A character vector of all tables in a given schema. +get_tables_duckdb <- function(con, schema) { + + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT table_name + FROM information_schema.tables + WHERE table_schema = '{schema}' + " + ) + ) |> + dplyr::pull(1) + +} + +#' A Database Specific Function to Retrieve the Number of Rows of a Table +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' @param query A string containing the query to send. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return An integer for the number of rows in the table. +get_n_rows_duckdb <- function(con, schema, table, query = "") { + + if (query != ""){ + if (!grepl("^SELECT", trimws(query), ignore.case = TRUE)){ + return(0) + } + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + {query} + ) AS subquery + " + ) + } else { + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + SELECT * + FROM \"{schema}\".\"{table}\" + ) AS subquery; + " + ) + } + + DBI::dbGetQuery( + con, + query_string + ) |> + dplyr::pull(1) + + +} + +#' A Database Specific Function for Retrieving Table Previews +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' +#' @return A data frame of 100 rows from the database table. +get_preview_duckdb <- function(con, schema, table) { + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT * + FROM \"{schema}\".\"{table}\" + LIMIT 100; + " + ) + ) +} + + +#' A Database Specific Function for Dropping Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbSendQuery +#' @importFrom DBI dbClearResult +#' @importFrom glue glue +#' +#' @return A result string. Either "Success" or an error message. 
+delete_table_duckdb <- function(con, schema, table){ + + DBI::dbSendQuery( + con, + glue::glue( + "DROP TABLE \"{schema}\".\"{table}\"" + ) + ) |> + DBI::dbClearResult() + + "Success" + +} + + +#' A Database Specific Function for Uploading Data Frames +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table_name A string for the new table name. +#' @param data A data frame to be uploaded. +#' @param temporary A logical value. Should the table be temporary? +#' +#' @importFrom DBI dbWriteTable +#' @importFrom DBI dbSendQuery +#' @importFrom DBI dbClearResult +#' +#' @return A result string. Either "Success" or an error message. +write_table_duckdb <- + function( + con, + schema, + table_name, + data, + temporary = FALSE + ){ + + DBI::dbSendQuery( + con, + glue::glue( + "SET SEARCH_PATH TO '{schema}'" + ) + ) |> + DBI::dbClearResult() + + DBI::dbWriteTable( + con, + name = table_name, + value = data.frame(data), + overwrite = TRUE, + temporary = temporary + ) + + "Success" + + } diff --git a/R/get_database_functions.R b/R/get_database_functions.R new file mode 100644 index 0000000..0a492ee --- /dev/null +++ b/R/get_database_functions.R @@ -0,0 +1,102 @@ +#' Get Database Driver Specific Functions +#' @noRd +#' +#' @param driver A string database connection driver name. +#' +#' @return A list of database specific functions. +get_database_functions <- + function( + driver + ){ + if (driver == "PqConnection"){ + return ( + list( + get_schemas_postgres, + get_tables_postgres, + get_n_rows_postgres, + get_preview_postgres, + delete_table_postgres, + write_table_postgres + ) + ) + + } else if (driver == "Snowflake"){ + return ( + list( + get_schemas_snowflake, + get_tables_snowflake, + get_n_rows_snowflake, + get_preview_snowflake, + delete_table_snowflake, + write_table_snowflake + ) + ) + + } else if (driver == "Vertica Database"){ + return ( + list( + get_schemas_vertica, + get_tables_vertica, + get_n_rows_vertica, + get_preview_vertica, + delete_table_vertica, + write_table_vertica + ) + ) + + } else if (driver == "duckdb_connection"){ + return ( + list( + get_schemas_duckdb, + get_tables_duckdb, + get_n_rows_duckdb, + get_preview_duckdb, + delete_table_duckdb, + write_table_duckdb + ) + ) + + } else if (driver == "MySQLConnection"){ + return ( + list( + get_schemas_mysql, + get_tables_mysql, + get_n_rows_mysql, + get_preview_mysql, + delete_table_mysql, + write_table_mysql + ) + ) + + } else if (driver == "SQLiteConnection"){ + return ( + list( + get_schemas_sqlite, + get_tables_sqlite, + get_n_rows_sqlite, + get_preview_sqlite, + delete_table_sqlite, + write_table_sqlite + ) + ) + + } else if (driver == "FaultyConnectionExample"){ + return ( + list( + \(con) stop("This is a get_schemas error"), + \(con, schema) stop("This is a get_tables error"), + \(con, schema, table, query) stop("This is a get_n_rows error"), + \(con, schema, table) stop("This is a get_preview error"), + \(con, schema, table) stop("This is a delete_table error"), + \(con, schema, table_name, data, temporary) stop("This is a write_table error") + ) + ) + + } else { + stop( + "Database driver not found. + Please use octopus::list_drivers() to see a list of all available drivers. + " + ) + } + } diff --git a/R/list_drivers.R b/R/list_drivers.R new file mode 100644 index 0000000..41370d5 --- /dev/null +++ b/R/list_drivers.R @@ -0,0 +1,15 @@ +#' List Compatible Database Drivers +#' +#' @return A character vector of compatible database drivers. 
+#' @export +list_drivers <- + function(){ + c( + "PqConnection", + "Snowflake", + "Vertica Database", + "duckdb_connection", + "MySQLConnection", + "SQLiteConnection" + ) + } diff --git a/R/mysql_connection.R b/R/mysql_connection.R new file mode 100644 index 0000000..771555a --- /dev/null +++ b/R/mysql_connection.R @@ -0,0 +1,215 @@ +#' A Database Specific Function To Retrieve Database Schemas +#' @noRd +#' +#' @param con A database connection object. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom dplyr pull +#' +#' @return A character vector of all schemas in the database. +get_schemas_mysql <- function(con) { + schemas <- + DBI::dbGetQuery( + con, + " + SELECT schema_name + FROM information_schema.schemata + ORDER BY schema_name; + " + ) |> + dplyr::pull(1) +} + +#' A Database Specific Function to Retrieve All Database Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return A character vector of all tables in a given schema. +get_tables_mysql <- function(con, schema) { + result <- tryCatch({ + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT table_name + FROM information_schema.tables + WHERE table_schema = '{schema}' + ORDER BY table_name; + " + ) + ) |> + dplyr::pull(1) + + }, error = function(error){ + data.frame( + error = error$message + ) + }) + +} + +#' A Database Specific Function to Retrieve the Number of Rows of a Table +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' @param query A string containing the query to send. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return An integer for the number of rows in the table. +get_n_rows_mysql <- function(con, schema, table, query = "") { + + if (query != ""){ + if (!grepl("^SELECT", trimws(query), ignore.case = TRUE)){ + return(0) + } + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + {query} + ) AS subquery; + " + ) + } else { + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + SELECT * + FROM `{schema}`.`{table}` + ) AS subquery; + " + ) + } + + result <- + tryCatch({ + result <- + DBI::dbGetQuery( + con, + query_string + ) |> + dplyr::pull(1) + + }, error = function(error){ + result <- 0 + }) + + +} + +#' A Database Specific Function for Retrieving Table Previews +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' +#' @return A data frame of 100 rows from the database table. +get_preview_mysql <- function(con, schema, table) { + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT * + FROM `{schema}`.`{table}` + LIMIT 100; + " + ) + ) +} + + +#' A Database Specific Function for Dropping Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbSendQuery +#' @importFrom glue glue +#' +#' @return A result string. Either "Success" or an error message. 
+delete_table_mysql <- function(con, schema, table){ + + result <- + tryCatch({ + DBI::dbSendQuery( + con, + glue::glue( + "DROP TABLE `{schema}`.`{table}`" + ) + ) + result <- "Success" + }, error = function(error){ + result <- error$message + }) + +} + + +#' A Database Specific Function for Uploading Data Frames +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table_name A string for the new table name. +#' @param data A data frame to be uploaded. +#' @param temporary A logical value. Should the table be temporary? +#' +#' @importFrom DBI dbWriteTable +#' @importFrom DBI Id +#' +#' @return A result string. Either "Success" or an error message. +write_table_mysql <- + function( + con, + schema, + table_name, + data, + temporary = FALSE + ){ + + res <- DBI::dbSendQuery( + con, + glue::glue( + "USE {schema}" + ) + ) + + DBI::dbClearResult(res) + + result <- + tryCatch({ + DBI::dbWriteTable( + con, + name = table_name, + value = data.frame(data), + overwrite = TRUE, + temporary = temporary, + row.names = FALSE + ) + + result <- "Success" + }, error = function(error){ + result <- error$message + }) + + } diff --git a/R/postgres_connection.R b/R/postgres_connection.R new file mode 100644 index 0000000..4e83cc5 --- /dev/null +++ b/R/postgres_connection.R @@ -0,0 +1,213 @@ +#' A Database Specific Function To Retrieve Database Schemas +#' @noRd +#' +#' @param con A database connection object. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom dplyr pull +#' +#' @return A character vector of all schemas in the database. +get_schemas_postgres <- function(con) { + schemas <- + DBI::dbGetQuery( + con, + " + SELECT schema_name + FROM information_schema.schemata + ORDER BY schema_name; + " + ) |> + dplyr::pull(1) +} + +#' A Database Specific Function to Retrieve All Database Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return A character vector of all tables in a given schema. +get_tables_postgres <- function(con, schema) { + result <- tryCatch({ + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT table_name + FROM information_schema.tables + WHERE table_schema = '{schema}' + ORDER BY table_name; + " + ) + ) |> + dplyr::pull(1) + + }, error = function(error){ + data.frame( + error = error$message + ) + }) + +} + +#' A Database Specific Function to Retrieve the Number of Rows of a Table +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' @param query A string containing the query to send. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return An integer for the number of rows in the table. 
+get_n_rows_postgres <- function(con, schema, table, query = "") { + + if (query != ""){ + + if (!grepl("^SELECT", trimws(query), ignore.case = TRUE)){ + return(0) + } + + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + {query} + ) AS subquery + " + ) + } else { + + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + SELECT * + FROM \"{schema}\".\"{table}\" + ) AS subquery; + " + ) + } + + result <- + tryCatch({ + result <- + DBI::dbGetQuery( + con, + query_string + ) |> + dplyr::pull(1) + + }, error = function(error){ + result <- 0 + }) + + +} + +#' A Database Specific Function for Retrieving Table Previews +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' +#' @return A data frame of 100 rows from the database table. +get_preview_postgres <- function(con, schema, table) { + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT * + FROM \"{schema}\".\"{table}\" + LIMIT 100; + " + ) + ) +} + + +#' A Database Specific Function for Dropping Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbSendQuery +#' @importFrom DBI dbClearResult +#' @importFrom glue glue +#' +#' @return A result string. Either "Success" or an error message. +delete_table_postgres <- function(con, schema, table){ + + result <- + tryCatch({ + res <- DBI::dbSendQuery( + con, + glue::glue( + "DROP TABLE \"{schema}\".\"{table}\"" + ) + ) + DBI::dbClearResult(res) + result <- "Success" + }, error = function(error){ + result <- error$message + }) + +} + + +#' A Database Specific Function for Uploading Data Frames +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table_name A string for the new table name. +#' @param data A data frame to be uploaded. +#' @param temporary A logical value. Should the table be temporary? +#' +#' @importFrom DBI dbWriteTable +#' @importFrom DBI Id +#' +#' @return A result string. Either "Success" or an error message. +write_table_postgres <- + function( + con, + schema, + table_name, + data, + temporary = FALSE + ){ + + result <- + tryCatch({ + DBI::dbWriteTable( + con, + name = DBI::Id( + table = table_name, + schema = schema + ), + value = data.frame(data), + overwrite = TRUE, + temporary = temporary + ) + + result <- "Success" + }, error = function(error){ + result <- error$message + }) + +} diff --git a/R/snowflake_connection.R b/R/snowflake_connection.R new file mode 100644 index 0000000..ce84e00 --- /dev/null +++ b/R/snowflake_connection.R @@ -0,0 +1,224 @@ +#' A Database Specific Function To Retrieve Database Schemas +#' @noRd +#' +#' @param con A database connection object. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom dplyr pull +#' +#' @return A character vector of all schemas in the database. +get_schemas_snowflake <- function(con) { + tryCatch({ + schemas <- + DBI::dbGetQuery( + con, + " + SELECT schema_name + FROM information_schema.schemata + ORDER BY schema_name; + " + ) |> + dplyr::pull(1) + }, error = function(error){ + error$message + }) + +} + +#' A Database Specific Function to Retrieve All Database Tables +#' @noRd +#' +#' @param con A database connection object. 
+#' @param schema A +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return A character vector of all tables in a given schema. +get_tables_snowflake <- function(con, schema) { + result <- tryCatch({ + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT table_name + FROM information_schema.tables + WHERE table_schema = '{schema}' + ORDER BY table_name; + " + ) + ) |> + dplyr::pull(1) + + }, error = function(error){ + data.frame( + error = error$message + ) + }) + +} + +#' A Database Specific Function to Retrieve the Number of Rows of a Table +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' @param query A string containing the query to send. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return An integer for the number of rows in the table. +get_n_rows_snowflake <- function(con, schema, table, query = "") { + + if (query != ""){ + if (!grepl("^SELECT", trimws(query), ignore.case = TRUE)){ + return(0) + } + query_string <- + glue::glue( + " + SELECT + COUNT(*) + FROM ( + {query} + ) AS subquery; + " + ) + } else { + query_string <- + glue::glue( + " + SELECT + COUNT(*) + FROM ( + SELECT * + FROM \"{schema}\".\"{table}\" + ) AS subquery; + " + ) + } + + result <- + tryCatch({ + result <- + DBI::dbGetQuery( + con, + query_string + ) |> + dplyr::pull(1) + + }, error = function(error){ + print(error$message) + result <- 0 + }) + + +} + +#' A Database Specific Function for Retrieving Table Previews +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' +#' @return A data frame of 100 rows from the database table. +get_preview_snowflake <- function(con, schema, table) { + dbGetQuery( + con, + glue( + " + SELECT * + FROM \"{schema}\".\"{table}\" + LIMIT 100 + " + ) + ) +} + + +#' A Database Specific Function for Dropping Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbSendQuery +#' @importFrom DBI dbClearResult +#' @importFrom glue glue +#' +#' @return A result string. Either "Success" or an error message. +delete_table_snowflake <- function(con, schema, table){ + + result <- + tryCatch({ + res <- DBI::dbSendQuery( + con, + glue::glue( + "DROP TABLE \"{schema}\".\"{table}\"" + ) + ) + DBI::dbClearResult(res) + result <- "Success" + }, error = function(error){ + result <- error$message + }) + +} + + +#' A Database Specific Function for Uploading Data Frames +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table_name A string for the new table name. +#' @param data A data frame to be uploaded. +#' @param temporary A logical value. Should the table be temporary? +#' +#' @importFrom DBI dbWriteTable +#' @importFrom DBI dbSendQuery +#' @importFrom DBI dbClearResult +#' +#' @return A result string. Either "Success" or an error message. 
+write_table_snowflake <- + function( + con, + schema, + table_name, + data, + temporary = FALSE + ){ + + DBI::dbSendQuery( + con, + glue::glue( + "USE SCHEMA {schema}" + ) + ) |> + DBI::dbClearResult() + + result <- + tryCatch({ + DBI::dbWriteTable( + con, + name = table_name, + value = data.frame(data), + overwrite = TRUE, + temporary = temporary + ) + + result <- "Success" + }, error = function(error){ + result <- error$message + }) + + } diff --git a/R/sqlite_connection.R b/R/sqlite_connection.R new file mode 100644 index 0000000..d9ee21b --- /dev/null +++ b/R/sqlite_connection.R @@ -0,0 +1,200 @@ +#' A Database Specific Function To Retrieve Database Schemas +#' @noRd +#' +#' @param con A database connection object. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom dplyr pull +#' +#' @return A character vector of all schemas in the database. +get_schemas_sqlite <- function(con) { + schemas <- + DBI::dbGetQuery( + con, + "PRAGMA database_list;" + ) |> + pull(2) +} + +#' A Database Specific Function to Retrieve All Database Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A +#' +#' @importFrom DBI dbListTables +#' +#' @return A character vector of all tables in a given schema. +get_tables_sqlite <- function(con, schema) { + result <- tryCatch({ + if (schema == "main"){ + DBI::dbGetQuery( + con, + "SELECT name FROM sqlite_schema;" + ) |> + dplyr::pull(1) + } else { + DBI::dbGetQuery( + con, + "SELECT name FROM sqlite_temp_schema" + ) |> + dplyr::pull(1) + } + }, error = function(error){ + data.frame( + error = error$message + ) + }) + +} + +#' A Database Specific Function to Retrieve the Number of Rows of a Table +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' @param query A string containing the query to send. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return An integer for the number of rows in the table. +get_n_rows_sqlite <- function(con, schema, table, query = "") { + + if (query != ""){ + if (!grepl("^SELECT", trimws(query), ignore.case = TRUE)){ + return(0) + } + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + {query} + ) AS subquery + " + ) + } else { + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + SELECT * + FROM \"{schema}\".\"{table}\" + ) AS subquery; + " + ) + } + + result <- + tryCatch({ + result <- + DBI::dbGetQuery( + con, + query_string + ) |> + dplyr::pull(1) + + }, error = function(error){ + result <- 0 + }) + + +} + +#' A Database Specific Function for Retrieving Table Previews +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' +#' @return A data frame of 100 rows from the database table. +get_preview_sqlite <- function(con, schema, table) { + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT * + FROM \"{schema}\".\"{table}\" + LIMIT 100; + " + ) + ) +} + + +#' A Database Specific Function for Dropping Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbSendQuery +#' @importFrom DBI dbClearResult +#' @importFrom glue glue +#' +#' @return A result string. 
Either "Success" or an error message. +delete_table_sqlite <- function(con, schema, table){ + + result <- + tryCatch({ + res <- DBI::dbSendQuery( + con, + glue::glue( + "DROP TABLE \"{schema}\".\"{table}\"" + ) + ) + DBI::dbClearResult(res) + result <- "Success" + }, error = function(error){ + result <- error$message + }) + +} + + +#' A Database Specific Function for Uploading Data Frames +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table_name A string for the new table name. +#' @param data A data frame to be uploaded. +#' @param temporary A logical value. Should the table be temporary? +#' +#' @importFrom DBI dbWriteTable +#' +#' @return A result string. Either "Success" or an error message. +write_table_sqlite <- + function( + con, + schema, + table_name, + data, + temporary = FALSE + ){ + + result <- + tryCatch({ + DBI::dbWriteTable( + con, + name = table_name, + value = data.frame(data), + overwrite = TRUE, + temporary = temporary + ) + + result <- "Success" + }, error = function(error){ + result <- error$message + }) + + } diff --git a/R/submit_query.R b/R/submit_query.R new file mode 100644 index 0000000..0466acd --- /dev/null +++ b/R/submit_query.R @@ -0,0 +1,47 @@ +#' Submit Query +#' @noRd +#' +#' @param query A character string of the SQL query. +#' @param con A database connection object +#' @param n_rows The number of rows of the query. +#' +#' @return A data frame of the query result or an error. +#' +submit_query <- + function( + query, + con, + n_rows + ){ + + if (query == ""){ + stop( + "Please input a query" + ) + } else if(n_rows > 50000){ + stop( + "Your query returned a result too large." + ) + } else { + + if ( + all( + grepl("SELECT", query, ignore.case = TRUE), + !grepl("CREATE", query, ignore.case = TRUE) + ) + ) { + + result <- DBI::dbGetQuery(con, query) + + } else { + + DBI::dbSendQuery(con, query) |> + DBI::dbClearResult() + + result <- data.frame(result = "Success") + + } + } + + return (result) + } diff --git a/R/table_modal_w_download.R b/R/table_modal_w_download.R new file mode 100644 index 0000000..a2186f1 --- /dev/null +++ b/R/table_modal_w_download.R @@ -0,0 +1,86 @@ +#' Table Modal with Download UI +#' @noRd +#' +#' @importFrom shiny NS +#' @importFrom shiny tagList +#' @importFrom shiny modalDialog +#' @importFrom shiny h3 +#' @importFrom shiny p +#' @importFrom shiny div +#' @importFrom shiny downloadButton +#' @importFrom shiny modalButton +#' @importFrom glue glue +#' @importFrom DT renderDataTable +#' +#' @param id The namespace Id +#' @param title The title to be displayed in the modal. +#' @param download_title The title on the download button. +#' @param n_rows The number of rows of the result. +#' @param result The data frame to display in the modal. 
+#' +#' @return A shiny tagList +table_modal_w_download_UI <- function(id, title, download_title, n_rows, result) { + ns <- shiny::NS(id) + shiny::tagList( + shiny::showModal( + shiny::modalDialog( + easyClose = TRUE, + size = "xl", + shiny::h3(glue("{title}")), + shiny::p( + glue::glue("{n_rows} rows") + ), + shiny::div( + class = "table-responsive", + style = "max-height: 70vh;", + DT::renderDataTable( + options = list(dom = "t", paging = FALSE), + server = TRUE, + rownames = FALSE, + { + result + } + ) + ), + footer = shiny::tagList( + shiny::downloadButton( + ns("downloadQuery"), + glue::glue("{download_title}") + ), + shiny::modalButton("Dismiss") + ) + ) + ) + ) +} + +#' Table Modal with Download Server +#' @noRd +#' +#' @importFrom shiny moduleServer +#' @importFrom shiny downloadHandler +#' @importFrom glue glue +#' @importFrom utils write.csv +#' +#' @param id A namespace id. +#' @param result A data frame to open for downloading. +#' +#' @return A model server. +table_modal_w_download_Server <- function(id, result) { + shiny::moduleServer( + id, + function(input, output, session) { + output$downloadQuery <- + shiny::downloadHandler( + filename = function(){ + glue::glue( + "query_{format(Sys.time(), \"%Y%m%d%H%M%S\")}.csv" + ) + }, + content = function(file) { + utils::write.csv(result, file, row.names = FALSE) + } + ) + } + ) +} diff --git a/R/vertica_connection.R b/R/vertica_connection.R new file mode 100644 index 0000000..1aa0a4a --- /dev/null +++ b/R/vertica_connection.R @@ -0,0 +1,197 @@ +#' A Database Specific Function To Retrieve Database Schemas +#' @noRd +#' +#' @param con A database connection object. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom dplyr pull +#' +#' @return A character vector of all schemas in the database. +get_schemas_vertica <- function(con) { + DBI::dbGetQuery( + con, + " + SELECT + schema_name + FROM v_catalog.schemata s + JOIN v_catalog.users u + ON s.schema_owner_id = u.user_id + ORDER BY schema_name; + " + ) |> + dplyr::pull(1) + +} + +#' A Database Specific Function to Retrieve All Database Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return A character vector of all tables in a given schema. +get_tables_vertica <- function(con, schema) { + + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT + table_name + FROM v_catalog.tables + WHERE table_schema = '{schema}' + ORDER BY table_name; + " + ) + ) |> + dplyr::pull(1) + +} + +#' A Database Specific Function to Retrieve the Number of Rows of a Table +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' @param query A string containing the query to send. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' @importFrom dplyr pull +#' +#' @return An integer for the number of rows in the table. 
+get_n_rows_vertica <- function(con, schema, table, query = "") { + + if (query != ""){ + if (!grepl("^SELECT", trimws(query), ignore.case = TRUE)){ + return(0) + } + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + {query} + ) AS subquery + " + ) + } else { + query_string <- + glue::glue( + " + SELECT COUNT(*) AS count + FROM ( + SELECT * + FROM \"{schema}\".\"{table}\" + ) AS subquery; + " + ) + } + + DBI::dbGetQuery( + con, + query_string + ) |> + dplyr::pull(1) + + +} + +#' A Database Specific Function for Retrieving Table Previews +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbGetQuery +#' @importFrom glue glue +#' +#' @return A data frame of 100 rows from the database table. +get_preview_vertica <- function(con, schema, table) { + DBI::dbGetQuery( + con, + glue::glue( + " + SELECT * + FROM \"{schema}\".\"{table}\" + LIMIT 100; + " + ) + ) +} + + +#' A Database Specific Function for Dropping Tables +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table A string containing the table name. +#' +#' @importFrom DBI dbSendQuery +#' @importFrom DBI dbClearResult +#' @importFrom glue glue +#' +#' @return A result string. Either "Success" or an error message. +delete_table_vertica <- function(con, schema, table){ + + res <- DBI::dbSendQuery( + con, + glue::glue( + "DROP TABLE \"{schema}\".\"{table}\"" + ) + ) + DBI::dbClearResult(res) + + return("Success") + +} + + +#' A Database Specific Function for Uploading Data Frames +#' @noRd +#' +#' @param con A database connection object. +#' @param schema A string containing the schema name. +#' @param table_name A string for the new table name. +#' @param data A data frame to be uploaded. +#' @param temporary A logical value. Should the table be temporary? +#' +#' @importFrom DBI dbWriteTable +#' @importFrom DBI Id +#' +#' @return A result string. Either "Success" or an error message. +write_table_vertica <- + function( + con, + schema, + table_name, + data, + temporary = FALSE + ){ + + DBI::dbSendQuery( + con, + glue::glue( + "SET SEARCH_PATH TO '{schema}'" + ) + ) |> + DBI::dbClearResult() + + DBI::dbWriteTable( + con, + name = table_name, + value = data.frame(data), + overwrite = TRUE, + temporary = temporary + ) + + return("Success") + + } diff --git a/R/view_database.R b/R/view_database.R new file mode 100644 index 0000000..50c0e17 --- /dev/null +++ b/R/view_database.R @@ -0,0 +1,513 @@ +#' View Database Connection with Octopus +#' @description This function opens a shiny instance where the database can +#' be viewed. +#' +#' @param con A database connection object. The result of DBI::dbConnect(). +#' @param options A named list of options to be passed along to shinyApp(). 
+#' +#' @importFrom shiny shinyApp +#' @importFrom shiny showNotification +#' @importFrom shiny updateSelectInput +#' @importFrom shiny updateSelectizeInput +#' @importFrom shiny tags +#' @importFrom shiny div +#' @importFrom shiny p +#' @importFrom shiny h3 +#' @importFrom shiny selectInput +#' @importFrom shiny fileInput +#' @importFrom shiny showModal +#' @importFrom shiny modalDialog +#' @importFrom shiny tagList +#' @importFrom shiny modalButton +#' @importFrom shiny removeModal +#' @importFrom shiny observeEvent +#' @importFrom shiny stopApp +#' @importFrom shiny bootstrapPage +#' @importFrom shiny req +#' @importFrom shinyAce aceEditor +#' @importFrom shinyAce updateAceEditor +#' @importFrom shinyjs useShinyjs +#' @importFrom shinyjs onclick +#' @importFrom shinyjs hideElement +#' @importFrom shinyjs showElement +#' @importFrom shinyjs onevent +#' @importFrom shinyjs html +#' @importFrom glue glue +#' @importFrom DT renderDataTable +#' @importFrom rio import +#' @importFrom janitor clean_names +#' @importFrom DBI dbSendQuery +#' @importFrom DBI dbGetQuery +#' @importFrom httr GET +#' @importFrom httr use_proxy +#' @importFrom httr content +#' @importFrom bslib bs_theme +#' @importFrom utils write.csv +#' +#' +#' @return An R Shiny instance. +#' @export +#' +view_database <- + function(con, options = list()){ + ui <- shiny::bootstrapPage( + theme = bslib::bs_theme( + version = 5, + base_font = bslib::font_google("Prompt") + ), + + # Initiate shinyjs + shinyjs::useShinyjs(), + + + # User Interface---------------------------------------------------------- + + shiny::div( + class = "container pt-3", + + shiny::div( + class = "row mx-0 px-0 justify-content-center", + shiny::div( + class = "col-12 col-md-5 col-lg-4 col-xl-3 mx-3", + id = "viewDiv", + + ## Manage UI ------------------------------------------------------- + + shiny::div( + class = "row bg-light py-3 mb-3 px-3 border rounded shadow", + + # Header + shiny::tags$h3( + class = "text-center", + "Manage" + ), + + shiny::hr(), + + # Select schema + shiny::selectInput( + "schema", + label = shiny::strong("Schema"), + choices = c("Loading..."), + width = "100%" + ), + + # Select table + shiny::selectInput( + "tables", + label = shiny::strong("Table"), + choices = c("Loading..."), + width = "100%" + ), + + shiny::div( + class = "btn-group mt-2 mb-4 w-100", + + # View Tables + shiny::actionButton( + inputId = "viewTable", + icon = shiny::icon("expand"), + "View" + ), + + # Delete Tables + shiny::actionButton( + inputId = "deleteTable", + icon = shiny::icon("trash"), + "Delete" + ) + ), + + # File upload to database + shiny::fileInput( + "newTableUpload", + label = shiny::strong("Upload File"), + accept = c(".csv", ".xlsx"), + width = "100%" + ), + + ), + + ## Query UI -------------------------------------------------------- + + shiny::div( + class = "d-none d-md-block row bg-light py-3 mt-3 px-3 border rounded shadow", + + # Header + shiny::tags$h3( + class = "text-center", + "Query" + ), + + shiny::hr(), + + shiny::div( + class = "btn-group mt-3 mb-3 w-100", + + # Run Query + shiny::actionButton( + inputId = "submitQuery", + icon = shiny::icon("play"), + "Run" + ), + + # Format Query + shiny::actionButton( + inputId = "formatQuery", + icon = shiny::icon("indent"), + "Format" + ) + ) + ), + + ), + shiny::div( + class = "d-none d-md-block col-12 col-md-6 col-lg-7 bg-light pb-2 pt-1 mt-3 mt-md-0 border rounded shadow", + id = "queryDiv", + + shiny::div( + class = "row h-100 pt-1", + shiny::div( + class = "col", + # Send query to 
database + shinyAce::aceEditor( + "query", + mode = "pgsql", + height = "100%", + value = "", + showPrintMargin = FALSE, + fontSize = 16, + highlightActiveLine = FALSE + ), + ) + ) + ), + ) + ) + ) + + server <- function(input, output, session) { + + # Database Functions ----------------------------------------------------- + driver <- class(con) + + tryCatch({ + database_functions <- get_database_functions(driver) + get_schemas <- database_functions[[1]] + get_tables <- database_functions[[2]] + get_n_rows <- database_functions[[3]] + get_preview <- database_functions[[4]] + delete_table <- database_functions[[5]] + write_table <- database_functions[[6]] + + }, error = function(error){ + shiny::showNotification(error$message) + }) + + + # Initialize Inputs ----------------------------------------------- + + tryCatch({ + schemas <- get_schemas(con) + + # Update schema list + shiny::updateSelectizeInput( + session, + "schema", + choices = schemas, + selected = schemas[1], + server = TRUE + ) + + # Set initial tables + current_tables <- get_tables(con, schemas[1]) + shiny::updateSelectizeInput( + session, + "tables", + choices = current_tables, + selected = current_tables[1], + server = TRUE + ) + + # Update table select on schema change + shinyjs::onevent("change", "schema", { + current_tables <- get_tables(con, input$schema) + shiny::updateSelectizeInput( + session, + "tables", + choices = current_tables, + selected = current_tables[1], + server = TRUE + ) + }) + }, error = function(error){ + shiny::showNotification(error$message) + }) + + + # View Table ------------------------------------------------------------ + + # View tables on click view button + shinyjs::onclick("viewTable", { + + tryCatch({ + + # Get the number of rows + n_rows <- get_n_rows( + con, + input$schema, + input$tables + ) + + result <- get_preview( + con, + input$schema, + input$tables + ) + + table_modal_w_download_Server( + id = "preview", + result = result + ) + + table_modal_w_download_UI( + id = "preview", + title = "Preview Table", + download_title = "Download Preview", + n_rows = n_rows, + result = result + ) + + }, error = function(error){ + shiny::showNotification(error$message) + }) + + }) + + # Delete Table ----------------------------------------------------------- + + # Allow deleting a table + shinyjs::onclick("deleteTable", { + + table <- input$tables + + shiny::showModal( + shiny::modalDialog( + easyClose = TRUE, + shiny::h3("Confirm Deletion"), + shiny::p(glue("Are you sure you want to delete the table: {table}?")), + footer = shiny::tags$button( + id = "confirmDelete", + class = "btn btn-outline-danger", + "Confirm" + ) + ) + ) + + # Confirm delete + shinyjs::onclick("confirmDelete", asis = TRUE, { + shiny::removeModal() + + tryCatch({ + result <- delete_table( + con, + input$schema, + input$tables + ) + + # Notify success + shiny::showNotification(result) + + # Update select input + current_tables <- get_tables(con, input$schema) + shiny::updateSelectizeInput( + session, + "tables", + choices = current_tables, + selected = current_tables[1], + server = TRUE + ) + + }, error = function(error){ + shiny::showNotification(error$message) + }) + + }) + }) + + + # Upload Table ----------------------------------------------------------- + + # Increase file upload limit + options(shiny.maxRequestSize = 2000 * 1024^2) + + # Upload file to DB + shiny::observeEvent(input$newTableUpload, { + + # Read file + file <- input$newTableUpload + shiny::req(file) + new_table <- rio::import( + file$datapath + ) + + 
shiny::showModal( + shiny::modalDialog( + easyClose = TRUE, + size = "s", + shiny::tagList( + shiny::textInput( + "newTableName", + "Confirm Table Name", + value = gsub("\\..+", "", file$name) + ), + shiny::selectInput( + "cleanColumnNames", + "Clean column names?", + choices = c("Yes", "No"), + selected = "Yes" + ), + shiny::selectInput( + "tempTable", + "Temporary table?", + choices = c("Yes", "No"), + selected = "Yes" + ) + ), + footer = shiny::tagList( + shiny::tags$button( + id = "confirmNewTableName", + class = "btn btn-outline-primary", + "data-bs-dismiss" = "modal", + "Confirm" + ) + ) + ) + ) + + shinyjs::onclick("confirmNewTableName", { + if (input$cleanColumnNames == "Yes") { + new_table <- + janitor::clean_names(new_table) + } + + tryCatch({ + # Write data frame to DB + result <- write_table( + con, + schema = input$schema, + table_name = input$newTableName, + data = new_table, + temporary = ifelse( + input$tempTable == "Yes", + TRUE, + FALSE + ) + ) + + # Show result + shiny::showNotification(result, duration = 3) + + # Update select input + current_tables <- get_tables(con, input$schema) + shiny::updateSelectizeInput( + session, + "tables", + choices = current_tables, + selected = current_tables[1], + server = TRUE + ) + + }, error = function(error){ + shiny::showNotification(error$message) + }) + + }) + }) + + # Query ---------------------------------------------------------------- + + # Allow submitting queries + shinyjs::onclick("submitQuery", { + + tryCatch({ + + n_rows <- get_n_rows( + con = con, + schema = input$schema, + table = input$tables, + query = input$query + ) + + result <- submit_query( + query = input$query, + con = con, + n_rows = n_rows + ) + + table_modal_w_download_Server( + id = "query", + result = result + ) + + table_modal_w_download_UI( + id = "query", + title = "Query Preview", + download_title = "Download", + n_rows = n_rows, + result = result + ) + + # Update select input + current_tables <- get_tables(con, input$schema) + shiny::updateSelectizeInput( + session, + "tables", + choices = current_tables, + selected = current_tables[1], + server = TRUE + ) + + }, error = function(error){ + shiny::showNotification( + error$message + ) + }) + + }) + + # Format ----------------------------------------------------------------- + + # Reformat SQL code + shinyjs::onclick("formatQuery", { + original_query <- input$query + tryCatch( + { + response <- + httr::GET( + glue::glue( + "https://sqlformat.org/api/v1/format", + "?reindent=1", + "&keyword_case=upper", + "&sql={URLencode(original_query)}" + ), + httr::use_proxy( + Sys.getenv("https_proxy") + ) + ) + shinyAce::updateAceEditor( + session = session, + editorId = "query", + value = httr::content(response, as = "parsed")$result + ) + }, + error = function(error) { + shiny::showNotification(error$message) + } + ) + }) + + } + + shiny::shinyApp(ui, server, options = options) + } diff --git a/README.md b/README.md new file mode 100644 index 0000000..0c6d862 --- /dev/null +++ b/README.md @@ -0,0 +1,71 @@ + + + +# octopus + + + +[![R-CMD-check](https://github.com/MCodrescu/octopus/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/MCodrescu/octopus/actions/workflows/R-CMD-check.yaml) + + + + +The *octopus* package is a database management tool built entirely in R. +You can preview tables, upload files, send queries, and more. + +All database credentials are handled by the R user. 
Simply pass a +supported database connection object created with `DBI::dbConnect()` to +the function `octopus::view_database()` and *octopus* will start a shiny +application allowing you to interact with the database. + +*Try it out here! +[shinyapps.io](https://zszxyy-marcus-codrescu.shinyapps.io/octopusconceptapp/)* + +![octopus Interface](images/octopusMainPage3.png) + +## Supported Databases + +The *octopus* package currently supports the following databases: + +- Postgres + +- MySQL + +- SQLite + +- DuckDB + +- Snowflake + +- Vertica DB + +## Installation + +Install the stable version from CRAN. + +``` r +install.packages("octopus") +``` + +Install the development version from github. + +``` r +devtools::install_github("MCodrescu/octopus") +``` + +## Example + +Here is an example of connecting to a database and running the main +function of octopus. + +``` r +# Create a Database Connection +drv <- duckdb::duckdb() +con <- DBI::dbConnect(drv) + +# Write some data +DBI::dbWriteTable(con, "mtcars", mtcars) + +# View the Database +octopus::view_database(con) +``` diff --git a/man/list_drivers.Rd b/man/list_drivers.Rd new file mode 100644 index 0000000..ee819b2 --- /dev/null +++ b/man/list_drivers.Rd @@ -0,0 +1,14 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/list_drivers.R +\name{list_drivers} +\alias{list_drivers} +\title{List Compatible Database Drivers} +\usage{ +list_drivers() +} +\value{ +A character vector of compatible database drivers. +} +\description{ +List Compatible Database Drivers +} diff --git a/man/view_database.Rd b/man/view_database.Rd new file mode 100644 index 0000000..c3913f4 --- /dev/null +++ b/man/view_database.Rd @@ -0,0 +1,20 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/view_database.R +\name{view_database} +\alias{view_database} +\title{View Database Connection with Octopus} +\usage{ +view_database(con, options = list()) +} +\arguments{ +\item{con}{A database connection object. The result of DBI::dbConnect().} + +\item{options}{A named list of options to be passed along to shinyApp().} +} +\value{ +An R Shiny instance. +} +\description{ +This function opens a shiny instance where the database can + be viewed. +} diff --git a/tests/testthat.R b/tests/testthat.R new file mode 100644 index 0000000..23c474b --- /dev/null +++ b/tests/testthat.R @@ -0,0 +1,12 @@ +# This file is part of the standard setup for testthat. +# It is recommended that you do not modify it. +# +# Where should you do additional test configuration? 
+# Learn more about the roles of various files in: +# * https://r-pkgs.org/tests.html +# * https://testthat.r-lib.org/reference/test_package.html#special-files + +library(testthat) +library(octopus) + +test_check("octopus") diff --git a/tests/testthat/test-duckdb_connection.R b/tests/testthat/test-duckdb_connection.R new file mode 100644 index 0000000..809abbc --- /dev/null +++ b/tests/testthat/test-duckdb_connection.R @@ -0,0 +1,179 @@ + +duckdb_installed <- + tryCatch({ + drv <- + duckdb::duckdb() + + con <- + DBI::dbConnect( + drv + ) + TRUE + }, error = function(error){ + FALSE + }) + +if (duckdb_installed){ + + #------------------------------------------------------------------------------- + + test_that( + "get_schemas retrieves schemas correctly", + { + + expect_equal( + get_schemas_duckdb(con), + c("information_schema", "main", "pg_catalog") + ) + + } + ) + + test_that( + "get_tables retrieve tables correctly", + { + + DBI::dbWriteTable( + con, + name = DBI::Id( + schema = "main", + table = "mtcars" + ), + value = mtcars, + overwrite = TRUE + ) + + expect_true( + "mtcars" %in% get_tables_duckdb( + con, + schema = "main" + ) + ) + + } + ) + + test_that( + "get_n_rows retrieves the correct number of rows of a table", + { + expect_equal( + get_n_rows_duckdb( + con, + schema = "main", + table = "mtcars" + ) |> as.numeric(), + nrow(mtcars) |> as.numeric() + ) + } + ) + + test_that( + "get_n_rows retrieves the correct number of rows of a query", + { + expect_equal( + get_n_rows_duckdb( + con, + schema = "main", + table = "mtcars", + query = "SELECT * FROM mtcars LIMIT 10" + ) |> as.numeric(), + 10 + ) + } + ) + + test_that( + "get_preview returns a view of the dataframe", + { + mtcars_wo_rownames <- + mtcars + + rownames(mtcars_wo_rownames) <- + NULL + + expect_equal( + get_preview_duckdb( + con, + schema = "main", + table = "mtcars" + ), + mtcars_wo_rownames + ) + } + ) + + test_that( + "a create table query works correcty", + { + n_rows = get_n_rows_duckdb( + con = con, + schema = "", + table = "", + "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars" + ) + + submit_query( + "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars", + con = con, + n_rows = n_rows + ) + + expect_true( + "mtcars_2" %in% DBI::dbListTables(con) + ) + } + ) + + test_that( + "delete_table correctly drops the table", + { + expect_true( + "mtcars" %in% DBI::dbListTables(con) + ) + + expect_equal( + "Success", + delete_table_duckdb( + con, + schema = "main", + table = "mtcars" + ) + ) + + expect_false( + "mtcars" %in% DBI::dbListTables(con) + ) + + } + ) + + test_that( + "write_table correctly upload table", + { + + res <- DBI::dbSendQuery(con, "CREATE SCHEMA example") + DBI::dbClearResult(res) + + write_table_duckdb( + con, + schema = "example", + table_name = "mtcars", + data = mtcars + ) + + expect_true( + "mtcars" %in% get_tables_duckdb(con, schema = "example") + ) + + } + ) + + + + #------------------------------------------------------------------------------- + + duckdb::duckdb_shutdown(drv) + DBI::dbDisconnect(con) + +} + diff --git a/tests/testthat/test-list_drivers.R b/tests/testthat/test-list_drivers.R new file mode 100644 index 0000000..de82e4f --- /dev/null +++ b/tests/testthat/test-list_drivers.R @@ -0,0 +1,16 @@ +test_that( + "the drivers list is correct", + { + expect_equal( + c( + "PqConnection", + "Snowflake", + "Vertica Database", + "duckdb_connection", + "MySQLConnection", + "SQLiteConnection" + ), + list_drivers() + ) + } +) diff --git a/tests/testthat/test-mysql_connection.R 
b/tests/testthat/test-mysql_connection.R new file mode 100644 index 0000000..fc2f000 --- /dev/null +++ b/tests/testthat/test-mysql_connection.R @@ -0,0 +1,206 @@ +### Docker is required to run these tests. +### Read more about docker at https://hub.docker.com/_/mysql + +docker_working <- + tryCatch({ + container_sha <- + system( + "docker run -e MYSQL_ROOT_PASSWORD=password -e MYSQL_USER=admin -e MYSQL_PASSWORD=password -e MYSQL_DATABASE=example -p 3306:3306 -d mysql", + intern = TRUE + ) + + Sys.sleep(20) + + con <- + DBI::dbConnect( + RMySQL::MySQL(), + host = "127.0.0.1", + username = "root", + password = "password", + dbname = "example", + port = 3306 + ) + + TRUE + }, error = \(x){ + FALSE + }) + +if(docker_working){ + + #------------------------------------------------------------------------------- + + test_that( + "get_schemas retrieves schemas correctly", + { + + expect_equal( + get_schemas_mysql(con), + c("example", "information_schema", "mysql", "performance_schema", "sys") + ) + + } + ) + + test_that( + "get_tables retrieve tables correctly", + { + + DBI::dbSendQuery( + con, + "SET @@GLOBAL.local_infile = 1;" + ) + + result <- write_table_mysql( + con, + schema = "example", + table_name = "mtcars", + data = mtcars + ) + + expect_true( + "mtcars" %in% get_tables_mysql( + con, + schema = "example" + ) + ) + + } + ) + + test_that( + "get_n_rows retrieves the correct number of rows of a table", + { + expect_equal( + get_n_rows_mysql( + con, + schema = "example", + table = "mtcars" + ) |> as.numeric(), + nrow(mtcars) |> as.numeric() + ) + } + ) + + test_that( + "get_n_rows retrieves the correct number of rows of a query", + { + expect_equal( + get_n_rows_mysql( + con, + schema = "example", + table = "mtcars", + query = "SELECT * FROM mtcars LIMIT 10" + ) |> as.numeric(), + 10 + ) + } + ) + + test_that( + "get_preview returns a view of the dataframe", + { + mtcars_wo_rownames <- + mtcars + + rownames(mtcars_wo_rownames) <- + NULL + + expect_equal( + get_preview_mysql( + con, + schema = "example", + table = "mtcars" + ), + mtcars_wo_rownames + ) + } + ) + + + test_that( + "a create table query works correcty", + { + n_rows = get_n_rows_mysql( + con = con, + schema = "", + table = "", + "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars" + ) + + submit_query( + "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars", + con = con, + n_rows = n_rows + ) + + expect_true( + "mtcars_2" %in% DBI::dbListTables(con) + ) + } + ) + + + test_that( + "delete_table correctly drops the table", + { + + expect_equal( + "Success", + delete_table_mysql( + con, + schema = "example", + table = "mtcars" + ) + ) + + expect_false( + "mtcars" %in% get_tables_mysql(con, schema = "example") + ) + + } + ) + + test_that( + "write_table correctly upload table", + { + + res <- DBI::dbSendQuery(con, "CREATE SCHEMA example_2") + DBI::dbClearResult(res) + + write_table_mysql( + con, + schema = "example_2", + table_name = "mtcars", + data = mtcars + ) + + expect_true( + "mtcars" %in% get_tables_mysql(con, schema = "example_2") + ) + + } + ) + + #------------------------------------------------------------------------------- + + DBI::dbDisconnect(con) + + system( + glue::glue( + "docker stop {container_sha}" + ), + intern = TRUE + ) + + Sys.sleep(3) + + system( + glue::glue( + "docker rm {container_sha}" + ), + intern = TRUE + ) + +} + diff --git a/tests/testthat/test-postgres_connection.R b/tests/testthat/test-postgres_connection.R new file mode 100644 index 0000000..a38b55a --- /dev/null +++ 
diff --git a/tests/testthat/test-postgres_connection.R b/tests/testthat/test-postgres_connection.R
new file mode 100644
index 0000000..a38b55a
--- /dev/null
+++ b/tests/testthat/test-postgres_connection.R
@@ -0,0 +1,206 @@
+### Docker is required to run these tests.
+### Read more about docker at https://hub.docker.com/_/postgres
+
+docker_working <-
+  tryCatch({
+    container_sha <-
+      system(
+        "docker run -e POSTGRES_PASSWORD=password -e POSTGRES_DB=example -p 5455:5432 -d postgres",
+        intern = TRUE
+      )
+
+    Sys.sleep(3)
+
+    con <-
+      DBI::dbConnect(
+        RPostgres::Postgres(),
+        host = "localhost",
+        user = "postgres",
+        password = "password",
+        dbname = "example",
+        port = 5455
+      )
+
+    TRUE
+  }, error = \(x){
+    FALSE
+  })
+
+if(docker_working){
+
+  #-------------------------------------------------------------------------------
+
+  test_that(
+    "get_schemas retrieves schemas correctly",
+    {
+
+      expect_equal(
+        get_schemas_postgres(con),
+        c("information_schema", "pg_catalog", "pg_toast", "public")
+      )
+
+    }
+  )
+
+  test_that(
+    "get_tables retrieves tables correctly",
+    {
+
+      DBI::dbWriteTable(
+        con,
+        name = DBI::Id(
+          schema = "public",
+          table = "mtcars"
+        ),
+        value = mtcars,
+        overwrite = TRUE
+      )
+
+      expect_true(
+        "mtcars" %in% get_tables_postgres(
+          con,
+          schema = "public"
+        )
+      )
+
+    }
+  )
+
+  test_that(
+    "get_n_rows retrieves the correct number of rows of a table",
+    {
+      expect_equal(
+        get_n_rows_postgres(
+          con,
+          schema = "public",
+          table = "mtcars"
+        ) |> as.numeric(),
+        nrow(mtcars) |> as.numeric()
+      )
+    }
+  )
+
+  test_that(
+    "get_n_rows retrieves the correct number of rows of a query",
+    {
+      expect_equal(
+        get_n_rows_postgres(
+          con,
+          schema = "public",
+          table = "mtcars",
+          query = "SELECT * FROM mtcars LIMIT 10"
+        ) |> as.numeric(),
+        10
+      )
+    }
+  )
+
+  test_that(
+    "get_preview returns a view of the dataframe",
+    {
+      mtcars_wo_rownames <-
+        mtcars
+
+      rownames(mtcars_wo_rownames) <-
+        NULL
+
+      expect_equal(
+        get_preview_postgres(
+          con,
+          schema = "public",
+          table = "mtcars"
+        ),
+        mtcars_wo_rownames
+      )
+    }
+  )
+
+  test_that(
+    "a create table query works correctly",
+    {
+      n_rows <- get_n_rows_postgres(
+        con = con,
+        schema = "",
+        table = "",
+        "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars"
+      )
+
+      submit_query(
+        "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars",
+        con = con,
+        n_rows = n_rows
+      )
+
+      expect_true(
+        "mtcars_2" %in% DBI::dbListTables(con)
+      )
+    }
+  )
+
+
+  test_that(
+    "delete_table correctly drops the table",
+    {
+      expect_true(
+        "mtcars" %in% DBI::dbListTables(con)
+      )
+
+      expect_equal(
+        "Success",
+        delete_table_postgres(
+          con,
+          schema = "public",
+          table = "mtcars"
+        )
+      )
+
+      expect_false(
+        "mtcars" %in% DBI::dbListTables(con)
+      )
+
+    }
+  )
+
+  test_that(
+    "write_table correctly uploads table",
+    {
+
+      res <- DBI::dbSendQuery(con, "CREATE SCHEMA example")
+      DBI::dbClearResult(res)
+
+      write_table_postgres(
+        con,
+        schema = "example",
+        table_name = "mtcars",
+        data = mtcars
+      )
+
+      expect_true(
+        "mtcars" %in% get_tables_postgres(con, schema = "example")
+      )
+
+    }
+  )
+
+  #-------------------------------------------------------------------------------
+
+  DBI::dbDisconnect(con)
+
+  system(
+    glue::glue(
+      "docker stop {container_sha}"
+    ),
+    intern = TRUE
+  )
+
+  Sys.sleep(3)
+
+  system(
+    glue::glue(
+      "docker rm {container_sha}"
+    ),
+    intern = TRUE
+  )
+
+}
+
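The Postgres suite stops and removes its container at the bottom of the file, which never runs if an error aborts the file part-way. One hedged alternative, assuming the withr package were available, is to register the cleanup as soon as container_sha is known:

# Register container cleanup up front so it runs even if a later test errors.
# Assumes withr is installed and container_sha holds the id returned by `docker run`.
withr::defer(
  {
    system(glue::glue("docker stop {container_sha}"), intern = TRUE)
    system(glue::glue("docker rm {container_sha}"), intern = TRUE)
  },
  envir = testthat::teardown_env()
)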
diff --git a/tests/testthat/test-snowflake_connection.R b/tests/testthat/test-snowflake_connection.R
new file mode 100644
index 0000000..c7c5921
--- /dev/null
+++ b/tests/testthat/test-snowflake_connection.R
@@ -0,0 +1,213 @@
+## A Snowflake database is required for these tests.
+## You can get a free trial at https://signup.snowflake.com/
+
+snowflake_key_set <-
+  tryCatch({
+    keyring::key_get("SnowflakeTrialPassword") != ""
+  }, error = function(error){
+    FALSE
+  })
+
+if (snowflake_key_set){
+
+  con <-
+    DBI::dbConnect(
+      odbc::odbc(),
+      dsn = "Snowflake_Trial",
+      pwd = keyring::key_get("SnowflakeTrialPassword")
+    )
+
+  test_that(
+    "get_schemas retrieves schemas correctly",
+    {
+
+      expect_equal(
+        get_schemas_snowflake(con),
+        c("INFORMATION_SCHEMA", "PUBLIC")
+      )
+
+    }
+  )
+
+  test_that(
+    "get_tables retrieves tables correctly",
+    {
+
+      mtcars_2 <- mtcars
+
+      rownames(mtcars_2) <-
+        seq_len(nrow(mtcars))
+
+      DBI::dbWriteTable(
+        con,
+        name = DBI::Id(
+          schema = "PUBLIC",
+          table = "MTCARS"
+        ),
+        value = mtcars_2,
+        overwrite = TRUE,
+        row.names = TRUE
+      )
+
+      expect_true(
+        "MTCARS" %in% get_tables_snowflake(
+          con,
+          schema = "PUBLIC"
+        )
+      )
+
+    }
+  )
+
+  test_that(
+    "get_n_rows retrieves the correct number of rows of a table",
+    {
+      expect_equal(
+        get_n_rows_snowflake(
+          con,
+          schema = "PUBLIC",
+          table = "MTCARS"
+        ) |> as.numeric(),
+        nrow(mtcars) |> as.numeric()
+      )
+    }
+  )
+
+  test_that(
+    "get_n_rows retrieves the correct number of rows of a query",
+    {
+      expect_equal(
+        get_n_rows_snowflake(
+          con,
+          schema = "PUBLIC",
+          table = "MTCARS",
+          query = "SELECT * FROM MTCARS LIMIT 10"
+        ) |> as.numeric(),
+        10
+      )
+    }
+  )
+
+  test_that(
+    "get_preview returns a view of the dataframe",
+    {
+
+      # Note: Snowflake writes tables in a different order
+
+      mtcars_2 <- mtcars
+
+      rownames(mtcars_2) <-
+        seq_len(nrow(mtcars))
+
+      expect_equal(
+        get_preview_snowflake(
+          con,
+          schema = "PUBLIC",
+          table = "MTCARS"
+        ) |>
+          dplyr::mutate(
+            row_names = readr::parse_number(
+              row_names
+            )
+          ) |>
+          dplyr::arrange(row_names) |>
+          dplyr::select(-c("row_names")),
+        mtcars_2
+      )
+    }
+  )
+
+  test_that(
+    "a create table query works correctly",
+    {
+      n_rows <- get_n_rows_snowflake(
+        con = con,
+        schema = "",
+        table = "",
+        "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars"
+      )
+
+      submit_query(
+        "USE SCHEMA PUBLIC",
+        con = con,
+        n_rows = n_rows
+      )
+
+      submit_query(
+        "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars",
+        con = con,
+        n_rows = n_rows
+      )
+
+      expect_true(
+        "MTCARS_2" %in% DBI::dbListTables(con)
+      )
+    }
+  )
+
+  test_that(
+    "delete_table correctly drops the table",
+    {
+      expect_true(
+        "MTCARS" %in% DBI::dbListTables(con)
+      )
+
+      expect_equal(
+        "Success",
+        delete_table_snowflake(
+          con,
+          schema = "PUBLIC",
+          table = "MTCARS"
+        )
+      )
+
+      expect_equal(
+        "Success",
+        delete_table_snowflake(
+          con,
+          schema = "PUBLIC",
+          table = "MTCARS_2"
+        )
+      )
+
+      expect_false(
+        "MTCARS" %in% DBI::dbListTables(con)
+      )
+
+      expect_false(
+        "MTCARS_2" %in% DBI::dbListTables(con)
+      )
+
+    }
+  )
+
+  test_that(
+    "write_table correctly uploads table",
+    {
+
+      DBI::dbSendQuery(con, "CREATE SCHEMA EXAMPLE") |>
+        DBI::dbClearResult()
+
+      DBI::dbSendQuery(con, "USE SCHEMA EXAMPLE") |>
+        DBI::dbClearResult()
+
+      write_table_snowflake(
+        con,
+        schema = "EXAMPLE",
+        table_name = "MTCARS",
+        data = mtcars
+      )
+
+      expect_true(
+        "MTCARS" %in% get_tables_snowflake(con, schema = "EXAMPLE")
+      )
+
+      DBI::dbSendQuery(con, "DROP SCHEMA EXAMPLE") |>
+        DBI::dbClearResult()
+
+    }
+  )
+
+  DBI::dbDisconnect(con)
+}
+
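The Snowflake suite assumes two pieces of local setup that are easy to miss: a keyring entry named SnowflakeTrialPassword and an ODBC DSN named Snowflake_Trial. A one-time setup sketch follows; the odbc.ini stanza in the comments is illustrative only and depends on the Snowflake ODBC driver installed on your machine:

# Store the trial password under the name the tests look up; keyring prompts for the value.
keyring::key_set("SnowflakeTrialPassword")

# The "Snowflake_Trial" DSN must also exist in the system ODBC configuration.
# An illustrative (not authoritative) odbc.ini entry might look like:
#   [Snowflake_Trial]
#   Driver = SnowflakeDSIIDriver
#   Server = <account>.snowflakecomputing.com
#   UID    = <user>

# Confirm the DSN is visible to R before running the tests.
odbc::odbcListDataSources()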
diff --git a/tests/testthat/test-sqlite_connection.R b/tests/testthat/test-sqlite_connection.R
new file mode 100644
index 0000000..e19514f
--- /dev/null
+++ b/tests/testthat/test-sqlite_connection.R
@@ -0,0 +1,183 @@
+### RSQLite Package is Required to Run These Tests
+
+sqlite_installed <-
+  tryCatch({
+    con <-
+      DBI::dbConnect(
+        RSQLite::SQLite(),
+        ":memory:"
+      )
+    TRUE
+  }, error = function(error){
+    FALSE
+  })
+
+
+
+#-------------------------------------------------------------------------------
+if (sqlite_installed){
+  test_that(
+    "get_schemas retrieves schemas correctly",
+    {
+
+      expect_equal(
+        get_schemas_sqlite(con),
+        c("main")
+      )
+
+    }
+  )
+
+  test_that(
+    "get_tables retrieves tables correctly",
+    {
+
+      DBI::dbWriteTable(
+        con,
+        name = "mtcars",
+        value = mtcars,
+        overwrite = TRUE
+      )
+
+      DBI::dbWriteTable(
+        con,
+        name = "mtcars_temp",
+        value = mtcars,
+        overwrite = TRUE,
+        temporary = TRUE
+      )
+
+      expect_true(
+        "mtcars" %in% get_tables_sqlite(
+          con,
+          schema = "main"
+        )
+      )
+
+      expect_true(
+        "mtcars_temp" %in% get_tables_sqlite(
+          con,
+          schema = "temp"
+        )
+      )
+
+    }
+  )
+
+  test_that(
+    "get_n_rows retrieves the correct number of rows of a table",
+    {
+      expect_equal(
+        get_n_rows_sqlite(
+          con,
+          schema = "main",
+          table = "mtcars"
+        ) |> as.numeric(),
+        nrow(mtcars) |> as.numeric()
+      )
+    }
+  )
+
+  test_that(
+    "get_n_rows retrieves the correct number of rows of a query",
+    {
+      expect_equal(
+        get_n_rows_sqlite(
+          con,
+          schema = "main",
+          table = "mtcars",
+          query = "SELECT * FROM mtcars LIMIT 10"
+        ) |> as.numeric(),
+        10
+      )
+    }
+  )
+
+  test_that(
+    "get_preview returns a view of the dataframe",
+    {
+      mtcars_wo_rownames <-
+        mtcars
+
+      rownames(mtcars_wo_rownames) <-
+        NULL
+
+      expect_equal(
+        get_preview_sqlite(
+          con,
+          schema = "main",
+          table = "mtcars"
+        ),
+        mtcars_wo_rownames
+      )
+    }
+  )
+
+  test_that(
+    "a create table query works correctly",
+    {
+      n_rows <- get_n_rows_sqlite(
+        con = con,
+        schema = "",
+        table = "",
+        "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars"
+      )
+
+      submit_query(
+        "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars",
+        con = con,
+        n_rows = n_rows
+      )
+
+      expect_true(
+        "mtcars_2" %in% DBI::dbListTables(con)
+      )
+    }
+  )
+
+  test_that(
+    "delete_table correctly drops the table",
+    {
+      expect_true(
+        "mtcars" %in% DBI::dbListTables(con)
+      )
+
+      expect_equal(
+        "Success",
+        delete_table_sqlite(
+          con,
+          schema = "main",
+          table = "mtcars"
+        )
+      )
+
+      expect_false(
+        "mtcars" %in% DBI::dbListTables(con)
+      )
+
+    }
+  )
+
+  test_that(
+    "write_table correctly uploads table",
+    {
+
+      write_table_sqlite(
+        con,
+        schema = "main",
+        table_name = "mtcars",
+        data = mtcars
+      )
+
+      expect_true(
+        "mtcars" %in% get_tables_sqlite(con, schema = "main")
+      )
+
+    }
+  )
+
+  #-------------------------------------------------------------------------------
+
+  DBI::dbDisconnect(con)
+}
+
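The SQLite suite above relies on ordinary tables landing in the "main" schema and temporary tables in "temp". The standalone sketch below reproduces that split with plain DBI and RSQLite calls; it mirrors, rather than defines, what get_tables_sqlite() is expected to report:

# Ordinary tables are listed in sqlite_master; temporary tables in sqlite_temp_master.
con <- DBI::dbConnect(RSQLite::SQLite(), ":memory:")
DBI::dbWriteTable(con, "persistent_tbl", mtcars)
DBI::dbWriteTable(con, "temp_tbl", mtcars, temporary = TRUE)

DBI::dbGetQuery(con, "SELECT name FROM sqlite_master WHERE type = 'table'")
DBI::dbGetQuery(con, "SELECT name FROM sqlite_temp_master WHERE type = 'table'")

DBI::dbDisconnect(con)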
diff --git a/tests/testthat/test-submit_query.R b/tests/testthat/test-submit_query.R
new file mode 100644
index 0000000..0da0933
--- /dev/null
+++ b/tests/testthat/test-submit_query.R
@@ -0,0 +1,148 @@
+con <- DBI::dbConnect(
+  RSQLite::SQLite()
+)
+
+test_that(
+  "queries with results too large are not returned",
+  {
+
+    example_data <-
+      data.frame(
+        x = seq_len(50001)
+      )
+
+    write_table_sqlite(
+      con,
+      schema = "main",
+      table_name = "example_data",
+      data = example_data
+    )
+
+    query <- "SELECT * FROM example_data"
+
+    n_rows <- get_n_rows_sqlite(
+      con = con,
+      schema = "",
+      table = "",
+      query = query
+    )
+
+    expect_error(
+      submit_query(
+        query = query,
+        con = con,
+        n_rows = n_rows
+      )
+    )
+
+  }
+)
+
+test_that(
+  "blank queries cause an error",
+  {
+
+    query <- ""
+
+    n_rows <- get_n_rows_sqlite(
+      con = con,
+      schema = "",
+      table = "",
+      query = query
+    )
+
+    expect_error(
+      submit_query(
+        query = query,
+        con = con,
+        n_rows = n_rows
+      )
+    )
+
+  }
+)
+
+test_that(
+  "select queries return a result",
+  {
+
+    example_data <-
+      data.frame(
+        x = c(1, 2, 3),
+        y = c(4, 5, 6)
+      )
+
+    write_table_sqlite(
+      con,
+      schema = "main",
+      table_name = "example_data",
+      data = example_data
+    )
+
+    query <- "SELECT * FROM example_data"
+
+    n_rows <- get_n_rows_sqlite(
+      con = con,
+      schema = "",
+      table = "",
+      query = query
+    )
+
+    expect_equal(
+      example_data,
+      submit_query(
+        query = query,
+        con = con,
+        n_rows = n_rows
+      )
+    )
+
+  }
+)
+
+test_that(
+  "create queries return only a success message",
+  {
+
+    query <- "CREATE TABLE example_2 AS SELECT * FROM example_data"
+
+    n_rows <- get_n_rows_sqlite(
+      con = con,
+      schema = "",
+      table = "",
+      query = query
+    )
+
+    expect_equal(
+      submit_query(
+        query = query,
+        con = con,
+        n_rows = n_rows
+      ),
+      data.frame(result = "Success")
+    )
+  }
+)
+
+test_that(
+  "queries with syntax errors return errors",
+  {
+
+    query <- "SELECT * FRM example_data"
+
+    n_rows <- get_n_rows_sqlite(
+      con = con,
+      schema = "",
+      table = "",
+      query = query
+    )
+
+    expect_error(
+      submit_query(
+        query = query,
+        con = con,
+        n_rows = n_rows
+      )
+    )
+  }
+)
+
+DBI::dbDisconnect(con)
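The 50,001-row fixture in the first test suggests submit_query() refuses to return result sets larger than roughly 50,000 rows. The guard below is a hypothetical illustration of that behaviour; the threshold and the helper name are inferred from the fixture, not taken from the package source:

# Hypothetical guard mirroring the "results too large" expectation above.
check_result_size <- function(n_rows, max_rows = 50000) {
  if (is.na(n_rows) || n_rows > max_rows) {
    stop("Query result is too large to return.")
  }
  invisible(TRUE)
}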
diff --git a/tests/testthat/test-vertica_connection.R b/tests/testthat/test-vertica_connection.R
new file mode 100644
index 0000000..0897fc8
--- /dev/null
+++ b/tests/testthat/test-vertica_connection.R
@@ -0,0 +1,216 @@
+### Docker is required to run these tests.
+### Read more about the Vertica CE image at https://hub.docker.com/r/vertica/vertica-ce
+
+docker_working <-
+  tryCatch({
+    container_sha <-
+      system(
+        "docker run -d -p 5433:5433 -e APP_DB_USER=newdbadmin -e APP_DB_PASSWORD=vertica vertica/vertica-ce",
+        intern = TRUE
+      )
+    TRUE
+  }, error = \(x){
+    FALSE
+  })
+
+odbc_dsn_setup <-
+  tryCatch({
+    "Vertica_Test" %in% odbc::odbcListDataSources()$name
+  }, error = function(error){
+    FALSE
+  })
+
+if(docker_working && odbc_dsn_setup){
+
+  Sys.sleep(120)
+
+  con <- DBI::dbConnect(
+    odbc::odbc(),
+    dsn = "Vertica_Test",
+    pwd = "vertica"
+  )
+
+  #-------------------------------------------------------------------------------
+
+  test_that(
+    "get_schemas retrieves schemas correctly",
+    {
+
+      expect_equal(
+        get_schemas_vertica(con),
+        c("online_sales", "public", "store", "v_catalog", "v_func", "v_internal", "v_monitor", "v_txtindex")
+      )
+
+    }
+  )
+
+  test_that(
+    "get_tables retrieves tables correctly",
+    {
+
+      mtcars_2 <- mtcars
+
+      mtcars_2["order"] <- seq_len(
+        nrow(mtcars)
+      )
+
+      DBI::dbWriteTable(
+        con,
+        name = DBI::Id(
+          schema = "public",
+          table = "mtcars"
+        ),
+        value = mtcars_2,
+        overwrite = TRUE,
+        row.names = FALSE
+      )
+
+      expect_true(
+        "mtcars" %in% get_tables_vertica(
+          con,
+          schema = "public"
+        )
+      )
+
+    }
+  )
+
+  test_that(
+    "get_n_rows retrieves the correct number of rows of a table",
+    {
+      expect_equal(
+        get_n_rows_vertica(
+          con,
+          schema = "public",
+          table = "mtcars"
+        ) |> as.numeric(),
+        nrow(mtcars) |> as.numeric()
+      )
+    }
+  )
+
+  test_that(
+    "get_n_rows retrieves the correct number of rows of a query",
+    {
+      expect_equal(
+        get_n_rows_vertica(
+          con,
+          schema = "public",
+          table = "mtcars",
+          query = "SELECT * FROM mtcars LIMIT 10"
+        ) |> as.numeric(),
+        10
+      )
+    }
+  )
+
+  test_that(
+    "get_preview returns a view of the dataframe",
+    {
+      mtcars_wo_rownames <-
+        mtcars
+
+      rownames(mtcars_wo_rownames) <-
+        NULL
+
+      expect_equal(
+        get_preview_vertica(
+          con,
+          schema = "public",
+          table = "mtcars"
+        ) |>
+          dplyr::arrange(order) |>
+          dplyr::select(-c(order)),
+        mtcars_wo_rownames
+      )
+    }
+  )
+
+  test_that(
+    "a create table query works correctly",
+    {
+      n_rows <- get_n_rows_vertica(
+        con = con,
+        schema = "",
+        table = "",
+        "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars"
+      )
+
+      submit_query(
+        "CREATE TABLE mtcars_2 AS SELECT * FROM mtcars",
+        con = con,
+        n_rows = n_rows
+      )
+
+      expect_true(
+        "mtcars_2" %in% DBI::dbListTables(con)
+      )
+    }
+  )
+
+  test_that(
+    "delete_table correctly drops the table",
+    {
+      expect_true(
+        "mtcars" %in% DBI::dbListTables(con)
+      )
+
+      expect_equal(
+        "Success",
+        delete_table_vertica(
+          con,
+          schema = "public",
+          table = "mtcars"
+        )
+      )
+
+      expect_false(
+        "mtcars" %in% DBI::dbListTables(con)
+      )
+
+    }
+  )
+
+  test_that(
+    "write_table correctly uploads table",
+    {
+
+      res <- DBI::dbSendQuery(con, "CREATE SCHEMA example")
+      DBI::dbClearResult(res)
+
+      write_table_vertica(
+        con,
+        schema = "example",
+        table_name = "mtcars",
+        data = mtcars
+      )
+
+      expect_true(
+        "mtcars" %in% get_tables_vertica(con, schema = "example")
+      )
+
+    }
+  )
+
+  #-------------------------------------------------------------------------------
+
+  DBI::dbDisconnect(con)
+
+  system(
+    glue::glue(
+      "docker stop {container_sha}"
+    ),
+    intern = TRUE
+  )
+
+  Sys.sleep(3)
+
+  system(
+    glue::glue(
+      "docker rm {container_sha}"
+    ),
+    intern = TRUE
+  )
+
+}
+
diff --git a/tests/testthat/test-view_database.R b/tests/testthat/test-view_database.R
new file mode 100644
index 0000000..b82c83c
--- /dev/null
+++ b/tests/testthat/test-view_database.R
@@ -0,0 +1,21 @@
+## Checklist of Manual Tests
+# Check that schemas are shown
+# Check that tables are shown
+# Check that changing a schema changes the tables
+# Check that clicking the View button shows a modal with the table
+# Check that downloading the table works
+# Check that creating a new table works
+# Check that removing that new table works
+# Check that a simple SELECT * FROM mtcars query works
+# Check that CREATE and ALTER queries work
+# Check that formatting a query works
+# Check that the collapsing looks good on every screen size
+# Check that uploading a table works
+# Check that deleting a table works
+
+test_that(
+  "the checklist was completed",
+  {
+    expect_true(TRUE)
+  }
+)
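For walking the manual checklist above, a throwaway SQLite database is the quickest route; the sketch below assumes the exported view_database() accepts a DBI connection object:

# Launch the app against an in-memory database populated with a sample table.
library(octopus)

con <- DBI::dbConnect(RSQLite::SQLite(), ":memory:")
DBI::dbWriteTable(con, "mtcars", mtcars)

view_database(con)  # opens the shiny app; close it to return to the console

DBI::dbDisconnect(con)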