From 05bc016db9c2e92d3f042b81dc39a3e4a748dc52 Mon Sep 17 00:00:00 2001 From: Dmitry Patsura Date: Tue, 31 Mar 2026 15:38:24 +0200 Subject: [PATCH] feat: Support pre-aggregation-specific data source credentials (CORE-123) refactor: Replace envVar helper with getVar in env.ts refactor: Remove getVar helper, inline get(keyByDataSource(...)) directly --- .../cubejs-athena-driver/src/AthenaDriver.ts | 38 +- packages/cubejs-backend-shared/src/env.ts | 1152 +++++++---------- packages/cubejs-backend-shared/src/index.ts | 1 + .../test/db_env_pre_aggregations.test.ts | 192 +++ packages/cubejs-base-driver/src/BaseDriver.ts | 8 +- .../src/BigQueryDriver.ts | 29 +- .../src/ClickHouseDriver.ts | 40 +- .../src/DatabricksDriver.ts | 46 +- .../driver/DremioDriver.js | 23 +- .../cubejs-druid-driver/src/DruidDriver.ts | 20 +- .../cubejs-duckdb-driver/src/DuckDBDriver.ts | 1 + .../driver/ElasticSearchDriver.js | 23 +- .../src/FireboltDriver.ts | 22 +- packages/cubejs-hive-driver/src/HiveDriver.js | 21 +- packages/cubejs-jdbc-driver/src/JDBCDriver.ts | 16 +- packages/cubejs-ksql-driver/src/KsqlDriver.ts | 20 +- .../src/MongoBIDriver.ts | 20 +- .../cubejs-mssql-driver/src/MSSqlDriver.ts | 24 +- .../driver/AuroraServerlessMySqlDriver.js | 11 +- .../cubejs-mysql-driver/src/MySqlDriver.ts | 22 +- .../driver/OracleDriver.js | 13 +- .../cubejs-pinot-driver/src/PinotDriver.ts | 24 +- .../src/PostgresDriver.ts | 30 +- .../src/PrestoDriver.ts | 38 +- .../src/orchestrator/DriverFactory.ts | 2 +- .../src/orchestrator/PreAggregations.ts | 20 +- .../cubejs-questdb-driver/src/QuestDriver.ts | 20 +- .../src/RedshiftDriver.ts | 35 +- .../src/core/OptsHandler.ts | 1 + .../cubejs-server-core/src/core/server.ts | 30 +- packages/cubejs-server-core/src/core/types.ts | 2 + .../src/SnowflakeDriver.ts | 80 +- .../driver/SqliteDriver.js | 3 +- 33 files changed, 1044 insertions(+), 983 deletions(-) create mode 100644 packages/cubejs-backend-shared/test/db_env_pre_aggregations.test.ts diff --git 
a/packages/cubejs-athena-driver/src/AthenaDriver.ts b/packages/cubejs-athena-driver/src/AthenaDriver.ts index fd8d5f76da755..cc3ad6f00bdca 100644 --- a/packages/cubejs-athena-driver/src/AthenaDriver.ts +++ b/packages/cubejs-athena-driver/src/AthenaDriver.ts @@ -99,6 +99,11 @@ export class AthenaDriver extends BaseDriver implements DriverInterface { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. */ @@ -118,28 +123,29 @@ export class AthenaDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; const accessKeyId = config.accessKeyId || - getEnv('athenaAwsKey', { dataSource }); + getEnv('athenaAwsKey', { dataSource, preAggregations }); const secretAccessKey = config.secretAccessKey || - getEnv('athenaAwsSecret', { dataSource }); + getEnv('athenaAwsSecret', { dataSource, preAggregations }); const assumeRoleArn = config.athenaAwsAssumeRoleArn || - getEnv('athenaAwsAssumeRoleArn', { dataSource }); + getEnv('athenaAwsAssumeRoleArn', { dataSource, preAggregations }); const assumeRoleExternalId = config.athenaAwsAssumeRoleExternalId || - getEnv('athenaAwsAssumeRoleExternalId', { dataSource }); + getEnv('athenaAwsAssumeRoleExternalId', { dataSource, preAggregations }); const { schema, ...restConfig } = config; this.schema = schema || - getEnv('dbName', { dataSource }) || - getEnv('dbSchema', { dataSource }); + getEnv('dbName', { dataSource, preAggregations }) || + getEnv('dbSchema', { dataSource, preAggregations }); // Configure credentials based on authentication method let credentials; @@ -166,34 +172,34 @@ export class AthenaDriver extends BaseDriver implements DriverInterface { ...restConfig, region: config.region || - getEnv('athenaAwsRegion', { dataSource }), + getEnv('athenaAwsRegion', { dataSource, preAggregations 
}), S3OutputLocation: config.S3OutputLocation || - getEnv('athenaAwsS3OutputLocation', { dataSource }), + getEnv('athenaAwsS3OutputLocation', { dataSource, preAggregations }), workGroup: config.workGroup || - getEnv('athenaAwsWorkgroup', { dataSource }) || + getEnv('athenaAwsWorkgroup', { dataSource, preAggregations }) || 'primary', catalog: config.catalog || - getEnv('athenaAwsCatalog', { dataSource }), + getEnv('athenaAwsCatalog', { dataSource, preAggregations }), database: config.database || - getEnv('dbName', { dataSource }), + getEnv('dbName', { dataSource, preAggregations }), exportBucket: config.exportBucket || - getEnv('dbExportBucket', { dataSource }), + getEnv('dbExportBucket', { dataSource, preAggregations }), pollTimeout: ( config.pollTimeout || - getEnv('dbPollTimeout', { dataSource }) || - getEnv('dbQueryTimeout', { dataSource }) + getEnv('dbPollTimeout', { dataSource, preAggregations }) || + getEnv('dbQueryTimeout', { dataSource, preAggregations }) ) * 1000, pollMaxInterval: ( config.pollMaxInterval || - getEnv('dbPollMaxInterval', { dataSource }) + getEnv('dbPollMaxInterval', { dataSource, preAggregations }) ) * 1000, exportBucketCsvEscapeSymbol: - getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }), + getEnv('dbExportBucketCsvEscapeSymbol', { dataSource, preAggregations }), }; if (this.config.exportBucket) { this.config.exportBucket = diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts index cb9072b5af01b..1f4d6123135b1 100644 --- a/packages/cubejs-backend-shared/src/env.ts +++ b/packages/cubejs-backend-shared/src/env.ts @@ -101,19 +101,18 @@ export function assertDataSource(dataSource = 'default'): string { /** * Returns data source specific environment variable name. - * @param origin Origin environment variable name. - * @param dataSource Data source name. 
*/ -export function keyByDataSource(origin: string, dataSource?: string): string { +export function keyByDataSource(origin: string, dataSource?: string, preAggregations?: boolean): string { if (dataSource) assertDataSource(dataSource); - if (!isMultipleDataSources() || dataSource === 'default') { - return origin; - } else if (!dataSource) { - return origin; + + let key: string; + + if (!isMultipleDataSources() || dataSource === 'default' || !dataSource) { + key = origin; } else { const s = origin.split('CUBEJS_'); if (s.length === 2) { - return `CUBEJS_DS_${dataSource.toUpperCase()}_${s[1]}`; + key = `CUBEJS_DS_${dataSource.toUpperCase()}_${s[1]}`; } else { throw new Error( `The ${ @@ -124,6 +123,37 @@ export function keyByDataSource(origin: string, dataSource?: string): string { ); } } + + if (preAggregations) { + const dsMatch = key.match(/^(CUBEJS_DS_[A-Z0-9_]+?_)(DB_|JDBC_|AWS_|DATABASE|FIREBOLT_)(.*)/); + if (dsMatch) { + return `${dsMatch[1]}PRE_AGGREGATIONS_${dsMatch[2]}${dsMatch[3]}`; + } + + if (key.startsWith('CUBEJS_')) { + return key.replace(/^CUBEJS_/, 'CUBEJS_PRE_AGGREGATIONS_'); + } + } + + return key; +} + +type DataSourceOpts = { dataSource: string, preAggregations?: boolean }; + +/** + * Checks if at least one PRE_AGGREGATIONS env var is set for a given data source. + */ +export function hasPreAggregationsEnvVars(dataSource?: string): boolean { + const prefix = (!dataSource || dataSource === 'default') + ? 
'CUBEJS_PRE_AGGREGATIONS_' + : `CUBEJS_DS_${dataSource.toUpperCase()}_PRE_AGGREGATIONS_`; + + for (const key of Object.keys(process.env)) { + if (key.startsWith(prefix)) { + return true; + } + } + return false; } function asPortOrSocket(input: string, envName: string): number | string { @@ -325,10 +355,9 @@ const variables: Record any> = { */ dbType: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_TYPE', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_TYPE', dataSource, preAggregations)).asString() ), /** @@ -336,12 +365,9 @@ const variables: Record any> = { */ dbSsl: ({ dataSource, - }: { - dataSource: string, - }) => { - const val = process.env[ - keyByDataSource('CUBEJS_DB_SSL', dataSource) - ] || 'false'; + preAggregations, + }: DataSourceOpts) => { + const val = get(keyByDataSource('CUBEJS_DB_SSL', dataSource, preAggregations)).default('false').asString(); if (val.toLocaleLowerCase() === 'true') { return true; } else if (val.toLowerCase() === 'false') { @@ -360,12 +386,9 @@ const variables: Record any> = { */ dbSslRejectUnauthorized: ({ dataSource, - }: { - dataSource: string, - }) => { - const val = process.env[ - keyByDataSource('CUBEJS_DB_SSL_REJECT_UNAUTHORIZED', dataSource) - ] || 'false'; + preAggregations, + }: DataSourceOpts) => { + const val = get(keyByDataSource('CUBEJS_DB_SSL_REJECT_UNAUTHORIZED', dataSource, preAggregations)).default('false').asString(); if (val.toLocaleLowerCase() === 'true') { return true; } else if (val.toLowerCase() === 'false') { @@ -384,12 +407,9 @@ const variables: Record any> = { */ dbUrl: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_URL', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_URL', dataSource, preAggregations)).asString() ), /** @@ -397,12 +417,9 @@ const variables: Record any> = { */ dbHost: ({ dataSource, - }: { - dataSource: 
string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_HOST', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_HOST', dataSource, preAggregations)).asString() ), /** @@ -413,12 +430,9 @@ const variables: Record any> = { */ dbUseSelectTestConnection: ({ dataSource, - }: { - dataSource: string, - }) => { - const val = process.env[ - keyByDataSource('CUBEJS_DB_USE_SELECT_TEST_CONNECTION', dataSource) - ] || 'false'; + preAggregations, + }: DataSourceOpts) => { + const val = get(keyByDataSource('CUBEJS_DB_USE_SELECT_TEST_CONNECTION', dataSource, preAggregations)).default('false').asString(); if (val.toLocaleLowerCase() === 'true') { return true; } else if (val.toLowerCase() === 'false') { @@ -435,37 +449,29 @@ const variables: Record any> = { /** * Kafka host for direct downloads from ksqlDb */ - dbKafkaHost: ({ dataSource }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_KAFKA_HOST', dataSource)] + dbKafkaHost: ({ dataSource, preAggregations }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_KAFKA_HOST', dataSource, preAggregations)).asString() ), /** * Kafka user for direct downloads from ksqlDb */ - dbKafkaUser: ({ dataSource }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_KAFKA_USER', dataSource)] + dbKafkaUser: ({ dataSource, preAggregations }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_KAFKA_USER', dataSource, preAggregations)).asString() ), /** * Kafka password for direct downloads from ksqlDb */ - dbKafkaPass: ({ dataSource }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_KAFKA_PASS', dataSource)] + dbKafkaPass: ({ dataSource, preAggregations }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_KAFKA_PASS', dataSource, preAggregations)).asString() ), /** * `true` if Kafka should use SASL_SSL for direct downloads from ksqlDb */ - dbKafkaUseSsl: ({ dataSource }: { - dataSource: string, - }) => ( - 
get(keyByDataSource('CUBEJS_DB_KAFKA_USE_SSL', dataSource)) + dbKafkaUseSsl: ({ dataSource, preAggregations }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_KAFKA_USE_SSL', dataSource, preAggregations)) .default('false') .asBool() ), @@ -475,10 +481,9 @@ const variables: Record any> = { */ dbDomain: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_DOMAIN', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DOMAIN', dataSource, preAggregations)).asString() ), /** @@ -486,17 +491,9 @@ const variables: Record any> = { */ dbPort: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_PORT', dataSource)] - ? parseInt( - `${ - process.env[keyByDataSource('CUBEJS_DB_PORT', dataSource)] - }`, - 10, - ) - : undefined + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_PORT', dataSource, preAggregations)).asInt() ), /** @@ -504,10 +501,9 @@ const variables: Record any> = { */ dbSocketPath: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_SOCKET_PATH', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SOCKET_PATH', dataSource, preAggregations)).asString() ), /** @@ -515,10 +511,9 @@ const variables: Record any> = { */ dbUser: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_USER', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_USER', dataSource, preAggregations)).asString() ), /** @@ -526,10 +521,9 @@ const variables: Record any> = { */ dbPass: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_PASS', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_PASS', dataSource, preAggregations)).asString() ), /** @@ -538,13 +532,13 @@ const variables: Record any> = { 
dbName: ({ required, dataSource, + preAggregations, }: { dataSource: string, required?: boolean, + preAggregations?: boolean, }) => { - const val = process.env[ - keyByDataSource('CUBEJS_DB_NAME', dataSource) - ]; + const val = get(keyByDataSource('CUBEJS_DB_NAME', dataSource, preAggregations)).asString(); if (required && !val) { throw new Error( `The ${ @@ -562,9 +556,11 @@ const variables: Record any> = { dbSchema: ({ required, dataSource, + preAggregations, }: { dataSource: string, required?: boolean, + preAggregations?: boolean, }) => { console.warn( `The ${ @@ -573,9 +569,7 @@ const variables: Record any> = { keyByDataSource('CUBEJS_DB_NAME', dataSource) } instead.` ); - const val = process.env[ - keyByDataSource('CUBEJS_DB_SCHEMA', dataSource) - ]; + const val = get(keyByDataSource('CUBEJS_DB_SCHEMA', dataSource, preAggregations)).asString(); if (required && !val) { throw new Error( `The ${ @@ -593,9 +587,11 @@ const variables: Record any> = { dbDatabase: ({ required, dataSource, + preAggregations, }: { dataSource: string, required?: boolean, + preAggregations?: boolean, }) => { console.warn( `The ${ @@ -604,9 +600,7 @@ const variables: Record any> = { keyByDataSource('CUBEJS_DB_NAME', dataSource) } instead.` ); - const val = process.env[ - keyByDataSource('CUBEJS_DATABASE', dataSource) - ]; + const val = get(keyByDataSource('CUBEJS_DATABASE', dataSource, preAggregations)).asString(); if (required && !val) { throw new Error( `The ${ @@ -622,19 +616,9 @@ const variables: Record any> = { */ dbMaxPoolSize: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_MAX_POOL', dataSource)] - ? 
parseInt( - `${ - process.env[ - keyByDataSource('CUBEJS_DB_MAX_POOL', dataSource) - ] - }`, - 10, - ) - : undefined + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_MAX_POOL', dataSource, preAggregations)).asInt() ), /** @@ -642,12 +626,11 @@ const variables: Record any> = { */ dbMinPoolSize: ({ dataSource, - }: { - dataSource: string, - }) => { - if (process.env[keyByDataSource('CUBEJS_DB_MIN_POOL', dataSource)]) { + preAggregations, + }: DataSourceOpts) => { + if (get(keyByDataSource('CUBEJS_DB_MIN_POOL', dataSource, preAggregations)).asString()) { const min = parseInt( - `${process.env[keyByDataSource('CUBEJS_DB_MIN_POOL', dataSource)]}`, + `${get(keyByDataSource('CUBEJS_DB_MIN_POOL', dataSource, preAggregations)).asString()}`, 10, ); if (min < 0) { @@ -670,11 +653,10 @@ const variables: Record any> = { */ dbPollMaxInterval: ({ dataSource, - }: { - dataSource: string, - }) => { + preAggregations, + }: DataSourceOpts) => { const key = keyByDataSource('CUBEJS_DB_POLL_MAX_INTERVAL', dataSource); - const value = process.env[key] || '5s'; + const value = get(keyByDataSource('CUBEJS_DB_POLL_MAX_INTERVAL', dataSource, preAggregations)).default('5s').asString(); return convertTimeStrToSeconds(value, key); }, @@ -684,11 +666,10 @@ const variables: Record any> = { */ dbPollTimeout: ({ dataSource, - }: { - dataSource: string, - }) => { + preAggregations, + }: DataSourceOpts) => { const key = keyByDataSource('CUBEJS_DB_POLL_TIMEOUT', dataSource); - const value = process.env[key]; + const value = get(keyByDataSource('CUBEJS_DB_POLL_TIMEOUT', dataSource, preAggregations)).asString(); if (value) { return convertTimeStrToSeconds(value, key); } else { @@ -706,11 +687,13 @@ const variables: Record any> = { */ dbQueryTimeout: ({ dataSource, + preAggregations, }: { dataSource?: string, + preAggregations?: boolean, } = {}) => { const key = keyByDataSource('CUBEJS_DB_QUERY_TIMEOUT', dataSource); - const value = process.env[key] || '10m'; + const value = 
get(keyByDataSource('CUBEJS_DB_QUERY_TIMEOUT', dataSource, preAggregations)).default('10m').asString(); return convertTimeStrToSeconds(value, key); }, @@ -812,10 +795,9 @@ const variables: Record any> = { */ jdbcUrl: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_JDBC_URL', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_JDBC_URL', dataSource, preAggregations)).asString() ), /** @@ -823,10 +805,9 @@ const variables: Record any> = { */ jdbcDriver: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_JDBC_DRIVER', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_JDBC_DRIVER', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -838,10 +819,9 @@ const variables: Record any> = { */ dbExportBucketCsvEscapeSymbol: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CSV_ESCAPE_SYMBOL', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_CSV_ESCAPE_SYMBOL', dataSource, preAggregations)).asString() ), /** @@ -850,13 +830,13 @@ const variables: Record any> = { dbExportBucketType: ({ supported, dataSource, + preAggregations, }: { supported: ('s3' | 'gcp' | 'azure')[], dataSource: string, + preAggregations?: boolean, }) => { - const val = process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_TYPE', dataSource) - ]; + const val = get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_TYPE', dataSource, preAggregations)).asString(); if ( val && supported && @@ -876,10 +856,9 @@ const variables: Record any> = { */ dbExportBucket: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_EXPORT_BUCKET', dataSource)] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET', dataSource, 
preAggregations)).asString() ), /** @@ -888,12 +867,9 @@ const variables: Record any> = { */ dbExportBucketMountDir: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_MOUNT_DIR', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_MOUNT_DIR', dataSource, preAggregations)).asString() ), /** @@ -901,12 +877,9 @@ const variables: Record any> = { */ dbExportBucketAwsKey: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AWS_KEY', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AWS_KEY', dataSource, preAggregations)).asString() ), /** @@ -914,12 +887,9 @@ const variables: Record any> = { */ dbExportBucketAwsSecret: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET', dataSource, preAggregations)).asString() ), /** @@ -927,12 +897,9 @@ const variables: Record any> = { */ dbExportBucketAwsRegion: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AWS_REGION', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AWS_REGION', dataSource, preAggregations)).asString() ), /** @@ -940,12 +907,9 @@ const variables: Record any> = { */ dbExportBucketAzureKey: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY', dataSource, preAggregations)).asString() ), /** @@ -953,12 +917,9 @@ const variables: Record any> = { */ dbExportAzureSasToken: ({ dataSource, - 
}: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_SAS_TOKEN', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_SAS_TOKEN', dataSource, preAggregations)).asString() ), /** @@ -966,12 +927,9 @@ const variables: Record any> = { */ dbExportBucketAzureClientId: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_ID', dataSource, preAggregations)).asString() ), /** @@ -979,12 +937,9 @@ const variables: Record any> = { */ dbExportBucketAzureClientSecret: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_CLIENT_SECRET', dataSource, preAggregations)).asString() ), /** @@ -992,12 +947,9 @@ const variables: Record any> = { */ dbExportBucketAzureTokenFilePAth: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_FEDERATED_TOKEN_FILE', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_FEDERATED_TOKEN_FILE', dataSource, preAggregations)).asString() ), /** @@ -1005,12 +957,9 @@ const variables: Record any> = { */ dbExportBucketAzureTenantId: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_AZURE_TENANT_ID', dataSource, preAggregations)).asString() ), /** @@ -1018,12 +967,9 @@ const variables: Record any> = { */ dbExportIntegration: ({ dataSource, - }: { - 
dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_INTEGRATION', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_INTEGRATION', dataSource, preAggregations)).asString() ), /** @@ -1031,12 +977,9 @@ const variables: Record any> = { */ dbExportGCSCredentials: ({ dataSource, - }: { - dataSource: string, - }) => { - const credentials = process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_GCS_CREDENTIALS', dataSource) - ]; + preAggregations, + }: DataSourceOpts) => { + const credentials = get(keyByDataSource('CUBEJS_DB_EXPORT_GCS_CREDENTIALS', dataSource, preAggregations)).asString(); if (credentials) { return JSON.parse( Buffer.from(credentials, 'base64').toString('utf8') @@ -1055,8 +998,8 @@ const variables: Record any> = { * @see https://dev.mysql.com/doc/refman/8.4/en/date-and-time-functions.html#function_convert-tz * @see https://dev.mysql.com/doc/refman/8.4/en/time-zone-support.html */ - mysqlUseNamedTimezones: ({ dataSource }: { dataSource: string }) => ( - get(keyByDataSource('CUBEJS_DB_MYSQL_USE_NAMED_TIMEZONES', dataSource)) + mysqlUseNamedTimezones: ({ dataSource, preAggregations }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_MYSQL_USE_NAMED_TIMEZONES', dataSource, preAggregations)) // It's true in schema-compiler integration tests .default('false') .asBool() @@ -1072,8 +1015,8 @@ const variables: Record any> = { * * @see https://learn.microsoft.com/en-us/sql/t-sql/queries/at-time-zone-transact-sql */ - mssqlUseNamedTimezones: ({ dataSource }: { dataSource: string }) => ( - get(keyByDataSource('CUBEJS_DB_MSSQL_USE_NAMED_TIMEZONES', dataSource)) + mssqlUseNamedTimezones: ({ dataSource, preAggregations }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_MSSQL_USE_NAMED_TIMEZONES', dataSource, preAggregations)) // It's true in schema-compiler integration tests .default('false') .asBool() @@ -1088,12 +1031,9 @@ const variables: Record any> = { */ databricksUrl: ({ 
dataSource, - }: { - dataSource: string, - }) => { - const val = process.env[ - keyByDataSource('CUBEJS_DB_DATABRICKS_URL', dataSource) - ]; + preAggregations, + }: DataSourceOpts) => { + const val = get(keyByDataSource('CUBEJS_DB_DATABRICKS_URL', dataSource, preAggregations)).asString(); if (!val) { throw new Error( `The ${ @@ -1109,12 +1049,9 @@ const variables: Record any> = { */ databricksToken: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DATABRICKS_TOKEN', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DATABRICKS_TOKEN', dataSource, preAggregations)).asString() ), /** @@ -1123,23 +1060,17 @@ const variables: Record any> = { */ databricksCatalog: ({ dataSource, - }: { - dataSource: string, - }) => process.env[ - keyByDataSource('CUBEJS_DB_DATABRICKS_CATALOG', dataSource) - ], + preAggregations, + }: DataSourceOpts) => get(keyByDataSource('CUBEJS_DB_DATABRICKS_CATALOG', dataSource, preAggregations)).asString(), /** * Databricks OAuth Client ID (Same as the service principal UUID) */ databricksOAuthClientId: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_ID', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_ID', dataSource, preAggregations)).asString() ), /** @@ -1147,12 +1078,9 @@ const variables: Record any> = { */ databricksOAuthClientSecret: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_SECRET', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DATABRICKS_OAUTH_CLIENT_SECRET', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -1163,104 +1091,86 @@ const variables: Record any> = { * Athena AWS key. 
*/ athenaAwsKey: ({ - dataSource - }: { - dataSource: string, - }) => ( + dataSource, + preAggregations, + }: DataSourceOpts) => ( // TODO (buntarb): this name is a common. Deprecate and replace? - process.env[keyByDataSource('CUBEJS_AWS_KEY', dataSource)] + get(keyByDataSource('CUBEJS_AWS_KEY', dataSource, preAggregations)).asString() ), /** * Athena AWS secret. */ athenaAwsSecret: ({ - dataSource - }: { - dataSource: string, - }) => ( + dataSource, + preAggregations, + }: DataSourceOpts) => ( // TODO (buntarb): this name is a common. Deprecate and replace? - process.env[keyByDataSource('CUBEJS_AWS_SECRET', dataSource)] + get(keyByDataSource('CUBEJS_AWS_SECRET', dataSource, preAggregations)).asString() ), /** * Athena AWS region. */ athenaAwsRegion: ({ - dataSource - }: { - dataSource: string, - }) => ( + dataSource, + preAggregations, + }: DataSourceOpts) => ( // TODO (buntarb): this name is a common. Deprecate and replace? - process.env[keyByDataSource('CUBEJS_AWS_REGION', dataSource)] + get(keyByDataSource('CUBEJS_AWS_REGION', dataSource, preAggregations)).asString() ), /** * Athena AWS S3 output location. */ athenaAwsS3OutputLocation: ({ - dataSource - }: { - dataSource: string, - }) => ( + dataSource, + preAggregations, + }: DataSourceOpts) => ( // TODO (buntarb): this name is a common. Deprecate and replace? - process.env[ - keyByDataSource('CUBEJS_AWS_S3_OUTPUT_LOCATION', dataSource) - ] + get(keyByDataSource('CUBEJS_AWS_S3_OUTPUT_LOCATION', dataSource, preAggregations)).asString() ), /** * Athena AWS workgroup. */ athenaAwsWorkgroup: ({ - dataSource - }: { - dataSource: string, - }) => ( + dataSource, + preAggregations, + }: DataSourceOpts) => ( // TODO (buntarb): Deprecate and replace? - process.env[ - keyByDataSource('CUBEJS_AWS_ATHENA_WORKGROUP', dataSource) - ] + get(keyByDataSource('CUBEJS_AWS_ATHENA_WORKGROUP', dataSource, preAggregations)).asString() ), /** * Athena AWS Catalog. 
*/ athenaAwsCatalog: ({ - dataSource - }: { - dataSource: string, - }) => ( + dataSource, + preAggregations, + }: DataSourceOpts) => ( // TODO (buntarb): Deprecate and replace? - process.env[ - keyByDataSource('CUBEJS_AWS_ATHENA_CATALOG', dataSource) - ] + get(keyByDataSource('CUBEJS_AWS_ATHENA_CATALOG', dataSource, preAggregations)).asString() ), /** * Athena AWS Assume Role ARN. */ athenaAwsAssumeRoleArn: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_AWS_ATHENA_ASSUME_ROLE_ARN', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_AWS_ATHENA_ASSUME_ROLE_ARN', dataSource, preAggregations)).asString() ), /** * Athena AWS Assume Role External ID. */ athenaAwsAssumeRoleExternalId: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_AWS_ATHENA_ASSUME_ROLE_EXTERNAL_ID', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_AWS_ATHENA_ASSUME_ROLE_EXTERNAL_ID', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -1271,46 +1181,40 @@ const variables: Record any> = { * BigQuery project ID. */ bigqueryProjectId: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_BQ_PROJECT_ID', dataSource)] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_BQ_PROJECT_ID', dataSource, preAggregations)).asString() ), /** * BigQuery Key file. */ bigqueryKeyFile: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_BQ_KEY_FILE', dataSource)] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_BQ_KEY_FILE', dataSource, preAggregations)).asString() ), /** * BigQuery credentials. 
*/ bigqueryCredentials: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_BQ_CREDENTIALS', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_BQ_CREDENTIALS', dataSource, preAggregations)).asString() ), /** * BigQuery location. */ bigqueryLocation: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[keyByDataSource('CUBEJS_DB_BQ_LOCATION', dataSource)] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_BQ_LOCATION', dataSource, preAggregations)).asString() ), /** @@ -1318,17 +1222,14 @@ const variables: Record any> = { * @deprecated */ bigqueryExportBucket: ({ - dataSource - }: { - dataSource: string, - }) => { + dataSource, + preAggregations, + }: DataSourceOpts) => { console.warn( 'The CUBEJS_DB_BQ_EXPORT_BUCKET is deprecated. ' + 'Please, use the CUBEJS_DB_EXPORT_BUCKET instead.' ); - return process.env[ - keyByDataSource('CUBEJS_DB_BQ_EXPORT_BUCKET', dataSource) - ]; + return get(keyByDataSource('CUBEJS_DB_BQ_EXPORT_BUCKET', dataSource, preAggregations)).asString(); }, /** **************************************************************** @@ -1339,11 +1240,10 @@ const variables: Record any> = { * ClickHouse read only flag. */ clickhouseReadOnly: ({ - dataSource - }: { - dataSource: string, - }) => ( - get(keyByDataSource('CUBEJS_DB_CLICKHOUSE_READONLY', dataSource)) + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_CLICKHOUSE_READONLY', dataSource, preAggregations)) .default('false') .asBool() ), @@ -1352,11 +1252,10 @@ const variables: Record any> = { * ClickHouse compression flag. 
*/ clickhouseCompression: ({ - dataSource - }: { - dataSource: string, - }) => ( - get(keyByDataSource('CUBEJS_DB_CLICKHOUSE_COMPRESSION', dataSource)) + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_CLICKHOUSE_COMPRESSION', dataSource, preAggregations)) .default('false') .asBool() ), @@ -1369,52 +1268,40 @@ const variables: Record any> = { * ElasticSearch API Id. */ elasticApiId: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_ELASTIC_APIKEY_ID', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_ELASTIC_APIKEY_ID', dataSource, preAggregations)).asString() ), /** * ElasticSearch API Key. */ elasticApiKey: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_ELASTIC_APIKEY_KEY', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_ELASTIC_APIKEY_KEY', dataSource, preAggregations)).asString() ), /** * ElasticSearch OpenDistro flag. */ elasticOpenDistro: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_ELASTIC_OPENDISTRO', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_ELASTIC_OPENDISTRO', dataSource, preAggregations)).asString() ), /** * ElasticSearch query format. */ elasticQueryFormat: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_ELASTIC_QUERY_FORMAT', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_ELASTIC_QUERY_FORMAT', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -1425,52 +1312,40 @@ const variables: Record any> = { * Firebolt API endpoint. 
*/ fireboltApiEndpoint: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_FIREBOLT_API_ENDPOINT', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_FIREBOLT_API_ENDPOINT', dataSource, preAggregations)).asString() ), /** * Firebolt engine name. */ fireboltEngineName: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_FIREBOLT_ENGINE_NAME', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_FIREBOLT_ENGINE_NAME', dataSource, preAggregations)).asString() ), /** * Firebolt engine endpoint. */ fireboltEngineEndpoint: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_FIREBOLT_ENGINE_ENDPOINT', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_FIREBOLT_ENGINE_ENDPOINT', dataSource, preAggregations)).asString() ), /** * Firebolt account name. */ fireboltAccount: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_FIREBOLT_ACCOUNT', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_FIREBOLT_ACCOUNT', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -1481,52 +1356,40 @@ const variables: Record any> = { * Hive type. */ hiveType: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_HIVE_TYPE', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_HIVE_TYPE', dataSource, preAggregations)).asString() ), /** * Hive version. 
*/ hiveVer: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_HIVE_VER', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_HIVE_VER', dataSource, preAggregations)).asString() ), /** * Hive thrift version. */ hiveThriftVer: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_HIVE_THRIFT_VER', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_HIVE_THRIFT_VER', dataSource, preAggregations)).asString() ), /** * Hive CDH version. */ hiveCdhVer: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_HIVE_CDH_VER', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_HIVE_CDH_VER', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -1537,26 +1400,20 @@ const variables: Record any> = { * Aurora secret ARN. */ auroraSecretArn: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DATABASE_SECRET_ARN', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DATABASE_SECRET_ARN', dataSource, preAggregations)).asString() ), /** * Aurora cluster ARN. */ auroraClusterArn: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DATABASE_CLUSTER_ARN', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DATABASE_CLUSTER_ARN', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -1567,78 +1424,60 @@ const variables: Record any> = { * Redshift export bucket unload ARN. 
*/ redshiftUnloadArn: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_REDSHIFT_ARN', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_EXPORT_BUCKET_REDSHIFT_ARN', dataSource, preAggregations)).asString() ), /** * Redshift AWS region for IAM authentication. */ redshiftAwsRegion: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_REDSHIFT_AWS_REGION', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_REDSHIFT_AWS_REGION', dataSource, preAggregations)).asString() ), /** * Redshift provisioned cluster identifier for IAM authentication. */ redshiftClusterIdentifier: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_REDSHIFT_CLUSTER_IDENTIFIER', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_REDSHIFT_CLUSTER_IDENTIFIER', dataSource, preAggregations)).asString() ), /** * Redshift Serverless workgroup name for IAM authentication. */ redshiftWorkgroupName: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_REDSHIFT_WORKGROUP_NAME', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_REDSHIFT_WORKGROUP_NAME', dataSource, preAggregations)).asString() ), /** * Redshift IAM Assume Role ARN for cross-account access. */ redshiftAssumeRoleArn: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_REDSHIFT_ASSUME_ROLE_ARN', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_REDSHIFT_ASSUME_ROLE_ARN', dataSource, preAggregations)).asString() ), /** * Redshift IAM Assume Role External ID. 
*/ redshiftAssumeRoleExternalId: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_REDSHIFT_ASSUME_ROLE_EXTERNAL_ID', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_REDSHIFT_ASSUME_ROLE_EXTERNAL_ID', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -1649,13 +1488,10 @@ const variables: Record any> = { * Materialize cluster. */ materializeCluster: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_MATERIALIZE_CLUSTER', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_MATERIALIZE_CLUSTER', dataSource, preAggregations)).asString() ), /** **************************************************************** @@ -1666,68 +1502,50 @@ const variables: Record any> = { * Snowflake account. */ snowflakeAccount: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_ACCOUNT', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_ACCOUNT', dataSource, preAggregations)).asString() ), /** * Snowflake region. */ snowflakeRegion: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_REGION', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_REGION', dataSource, preAggregations)).asString() ), /** * Snowflake warehouse. */ snowflakeWarehouse: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_WAREHOUSE', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_WAREHOUSE', dataSource, preAggregations)).asString() ), /** * Snowflake role. 
*/ snowflakeRole: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_ROLE', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_ROLE', dataSource, preAggregations)).asString() ), /** * Snowflake session keep alive flag. */ snowflakeSessionKeepAlive: ({ - dataSource - }: { - dataSource: string, - }) => { - const val = process.env[ - keyByDataSource( - 'CUBEJS_DB_SNOWFLAKE_CLIENT_SESSION_KEEP_ALIVE', - dataSource, - ) - ]; + dataSource, + preAggregations, + }: DataSourceOpts) => { + const val = get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_CLIENT_SESSION_KEEP_ALIVE', dataSource, preAggregations)).asString(); if (val) { if (val.toLocaleLowerCase() === 'true') { return true; @@ -1752,107 +1570,80 @@ const variables: Record any> = { * Snowflake authenticator. */ snowflakeAuthenticator: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_AUTHENTICATOR', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_AUTHENTICATOR', dataSource, preAggregations)).asString() ), /** * Snowflake OAuth token (string). */ snowflakeOAuthToken: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_OAUTH_TOKEN', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_OAUTH_TOKEN', dataSource, preAggregations)).asString() ), /** * Snowflake OAuth token path. */ snowflakeOAuthTokenPath: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_OAUTH_TOKEN_PATH', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_OAUTH_TOKEN_PATH', dataSource, preAggregations)).asString() ), /** * Snowflake host. 
*/ snowflakeHost: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_HOST', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_HOST', dataSource, preAggregations)).asString() ), /** * Snowflake private key. */ snowflakePrivateKey: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY', dataSource, preAggregations)).asString() ), /** * Snowflake private key path. */ snowflakePrivateKeyPath: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PATH', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PATH', dataSource, preAggregations)).asString() ), /** * Snowflake private key pass. */ snowflakePrivateKeyPass: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS', dataSource, preAggregations)).asString() ), /** * Snowflake case sensitivity for identifiers (like database columns). 
*/ snowflakeQuotedIdentIgnoreCase: ({ - dataSource - }: { - dataSource: string, - }) => { - const val = process.env[ - keyByDataSource( - 'CUBEJS_DB_SNOWFLAKE_QUOTED_IDENTIFIERS_IGNORE_CASE', - dataSource, - ) - ]; + dataSource, + preAggregations, + }: DataSourceOpts) => { + const val = get(keyByDataSource('CUBEJS_DB_SNOWFLAKE_QUOTED_IDENTIFIERS_IGNORE_CASE', dataSource, preAggregations)).asString(); if (val) { if (val.toLocaleLowerCase() === 'true') { return true; @@ -1881,16 +1672,13 @@ const variables: Record any> = { */ dbCatalog: ({ dataSource, - }: { - dataSource: string, - }) => { + preAggregations, + }: DataSourceOpts) => { console.warn( 'The CUBEJS_DB_CATALOG is deprecated. ' + 'Please, use the CUBEJS_DB_PRESTO_CATALOG instead.' ); - return process.env[ - keyByDataSource('CUBEJS_DB_CATALOG', dataSource) - ]; + return get(keyByDataSource('CUBEJS_DB_CATALOG', dataSource, preAggregations)).asString(); }, /** **************************************************************** @@ -1898,123 +1686,87 @@ const variables: Record any> = { ***************************************************************** */ duckdbMotherDuckToken: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_MOTHERDUCK_TOKEN', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_MOTHERDUCK_TOKEN', dataSource, preAggregations)).asString() ), duckdbDatabasePath: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_DATABASE_PATH', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_DATABASE_PATH', dataSource, preAggregations)).asString() ), duckdbS3Region: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_S3_REGION', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + 
get(keyByDataSource('CUBEJS_DB_DUCKDB_S3_REGION', dataSource, preAggregations)).asString() ), duckdbS3AccessKeyId: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_S3_ACCESS_KEY_ID', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_S3_ACCESS_KEY_ID', dataSource, preAggregations)).asString() ), duckdbS3SecretAccessKeyId: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_S3_SECRET_ACCESS_KEY', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_S3_SECRET_ACCESS_KEY', dataSource, preAggregations)).asString() ), duckdbS3Endpoint: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_S3_ENDPOINT', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_S3_ENDPOINT', dataSource, preAggregations)).asString() ), duckdbMemoryLimit: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_MEMORY_LIMIT', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_MEMORY_LIMIT', dataSource, preAggregations)).asString() ), duckdbSchema: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_SCHEMA', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_SCHEMA', dataSource, preAggregations)).asString() ), duckdbS3UseSsl: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_S3_USE_SSL', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_S3_USE_SSL', dataSource, preAggregations)).asString() ), 
duckdbS3UrlStyle: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_S3_URL_STYLE', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_S3_URL_STYLE', dataSource, preAggregations)).asString() ), duckdbS3SessionToken: ({ - dataSource - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_S3_SESSION_TOKEN', dataSource) - ] + dataSource, + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DUCKDB_S3_SESSION_TOKEN', dataSource, preAggregations)).asString() ), duckdbExtensions: ({ - dataSource - }: { - dataSource: string, - }) => { - const extensions = process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_EXTENSIONS', dataSource) - ]; + dataSource, + preAggregations, + }: DataSourceOpts) => { + const extensions = get(keyByDataSource('CUBEJS_DB_DUCKDB_EXTENSIONS', dataSource, preAggregations)).asString(); if (extensions) { return extensions.split(',').map(e => e.trim()); } @@ -2022,13 +1774,10 @@ const variables: Record any> = { }, duckdbCommunityExtensions: ({ - dataSource - }: { - dataSource: string, - }) => { - const extensions = process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_COMMUNITY_EXTENSIONS', dataSource) - ]; + dataSource, + preAggregations, + }: DataSourceOpts) => { + const extensions = get(keyByDataSource('CUBEJS_DB_DUCKDB_COMMUNITY_EXTENSIONS', dataSource, preAggregations)).asString(); if (extensions) { return extensions.split(',').map(e => e.trim()); } @@ -2036,13 +1785,10 @@ const variables: Record any> = { }, duckdbS3UseCredentialChain: ({ - dataSource - }: { - dataSource: string, - }) => { - const val = process.env[ - keyByDataSource('CUBEJS_DB_DUCKDB_S3_USE_CREDENTIAL_CHAIN', dataSource) - ]; + dataSource, + preAggregations, + }: DataSourceOpts) => { + const val = get(keyByDataSource('CUBEJS_DB_DUCKDB_S3_USE_CREDENTIAL_CHAIN', dataSource, preAggregations)).asString(); if (val) { if 
(val.toLocaleLowerCase() === 'true') { @@ -2073,12 +1819,9 @@ const variables: Record any> = { */ prestoCatalog: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_PRESTO_CATALOG', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_PRESTO_CATALOG', dataSource, preAggregations)).asString() ), /** @@ -2086,12 +1829,9 @@ const variables: Record any> = { */ prestoAuthToken: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_PRESTO_AUTH_TOKEN', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_PRESTO_AUTH_TOKEN', dataSource, preAggregations)).asString() ), /** *************************************************************** @@ -2103,22 +1843,17 @@ const variables: Record any> = { */ pinotAuthToken: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_PINOT_AUTH_TOKEN', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_PINOT_AUTH_TOKEN', dataSource, preAggregations)).asString() ), /** * Pinot / Startree Null value support */ - pinotNullHandling: ({ dataSource }: { dataSource: string }) => { - const val = process.env[ - keyByDataSource('CUBEJS_DB_PINOT_NULL_HANDLING', dataSource) - ]; + pinotNullHandling: ({ dataSource, preAggregations }: DataSourceOpts) => { + const val = get(keyByDataSource('CUBEJS_DB_PINOT_NULL_HANDLING', dataSource, preAggregations)).asString(); if (val) { if (val.toLocaleLowerCase() === 'true') { @@ -2149,12 +1884,9 @@ const variables: Record any> = { */ dremioAuthToken: ({ dataSource, - }: { - dataSource: string, - }) => ( - process.env[ - keyByDataSource('CUBEJS_DB_DREMIO_AUTH_TOKEN', dataSource) - ] + preAggregations, + }: DataSourceOpts) => ( + get(keyByDataSource('CUBEJS_DB_DREMIO_AUTH_TOKEN', dataSource, preAggregations)).asString() ), /** 
**************************************************************** diff --git a/packages/cubejs-backend-shared/src/index.ts b/packages/cubejs-backend-shared/src/index.ts index 77ec6de45053e..dd52dfa787d9c 100644 --- a/packages/cubejs-backend-shared/src/index.ts +++ b/packages/cubejs-backend-shared/src/index.ts @@ -2,6 +2,7 @@ export { getEnv, assertDataSource, keyByDataSource, + hasPreAggregationsEnvVars, isDockerImage, convertSizeToBytes, } from './env'; diff --git a/packages/cubejs-backend-shared/test/db_env_pre_aggregations.test.ts b/packages/cubejs-backend-shared/test/db_env_pre_aggregations.test.ts new file mode 100644 index 0000000000000..2757ac403d463 --- /dev/null +++ b/packages/cubejs-backend-shared/test/db_env_pre_aggregations.test.ts @@ -0,0 +1,192 @@ +import { getEnv, keyByDataSource, hasPreAggregationsEnvVars } from '../src/env'; + +// Clean up any leftover datasources config +delete process.env.CUBEJS_DATASOURCES; + +describe('Pre-aggregation env vars (single datasource)', () => { + afterEach(() => { + delete process.env.CUBEJS_DB_TYPE; + delete process.env.CUBEJS_PRE_AGGREGATIONS_DB_TYPE; + delete process.env.CUBEJS_DB_HOST; + delete process.env.CUBEJS_PRE_AGGREGATIONS_DB_HOST; + delete process.env.CUBEJS_DB_USER; + delete process.env.CUBEJS_PRE_AGGREGATIONS_DB_USER; + delete process.env.CUBEJS_DB_PASS; + delete process.env.CUBEJS_PRE_AGGREGATIONS_DB_PASS; + delete process.env.CUBEJS_DB_SSL; + delete process.env.CUBEJS_PRE_AGGREGATIONS_DB_SSL; + delete process.env.CUBEJS_DB_PORT; + delete process.env.CUBEJS_PRE_AGGREGATIONS_DB_PORT; + }); + + test('preAggregations: true reads PRE_AGGREGATIONS variant', () => { + process.env.CUBEJS_DB_HOST = 'regular-host'; + process.env.CUBEJS_PRE_AGGREGATIONS_DB_HOST = 'preagg-host'; + + expect(getEnv('dbHost', { dataSource: 'default', preAggregations: true })) + .toEqual('preagg-host'); + expect(getEnv('dbHost', { dataSource: 'default', preAggregations: false })) + .toEqual('regular-host'); + expect(getEnv('dbHost', 
{ dataSource: 'default' })) + .toEqual('regular-host'); + }); + + test('preAggregations: true returns undefined when PRE_AGGREGATIONS variant not set', () => { + process.env.CUBEJS_DB_HOST = 'regular-host'; + + expect(getEnv('dbHost', { dataSource: 'default', preAggregations: true })) + .toBeUndefined(); + }); + + test('preAggregations: false ignores PRE_AGGREGATIONS variant even when set', () => { + process.env.CUBEJS_DB_HOST = 'regular-host'; + process.env.CUBEJS_PRE_AGGREGATIONS_DB_HOST = 'preagg-host'; + + expect(getEnv('dbHost', { dataSource: 'default', preAggregations: false })) + .toEqual('regular-host'); + }); + + test('works with dbType', () => { + process.env.CUBEJS_DB_TYPE = 'postgres'; + process.env.CUBEJS_PRE_AGGREGATIONS_DB_TYPE = 'duckdb'; + + expect(getEnv('dbType', { dataSource: 'default', preAggregations: true })) + .toEqual('duckdb'); + expect(getEnv('dbType', { dataSource: 'default' })) + .toEqual('postgres'); + }); + + test('works with dbUser/dbPass', () => { + process.env.CUBEJS_DB_USER = 'regular-user'; + process.env.CUBEJS_PRE_AGGREGATIONS_DB_USER = 'preagg-user'; + process.env.CUBEJS_DB_PASS = 'regular-pass'; + process.env.CUBEJS_PRE_AGGREGATIONS_DB_PASS = 'preagg-pass'; + + expect(getEnv('dbUser', { dataSource: 'default', preAggregations: true })) + .toEqual('preagg-user'); + expect(getEnv('dbPass', { dataSource: 'default', preAggregations: true })) + .toEqual('preagg-pass'); + expect(getEnv('dbUser', { dataSource: 'default' })) + .toEqual('regular-user'); + expect(getEnv('dbPass', { dataSource: 'default' })) + .toEqual('regular-pass'); + }); + + test('works with dbSsl (boolean parsing)', () => { + process.env.CUBEJS_DB_SSL = 'false'; + process.env.CUBEJS_PRE_AGGREGATIONS_DB_SSL = 'true'; + + expect(getEnv('dbSsl', { dataSource: 'default', preAggregations: true })) + .toEqual(true); + expect(getEnv('dbSsl', { dataSource: 'default' })) + .toEqual(false); + }); + + test('works with dbPort (int parsing)', () => { + process.env.CUBEJS_DB_PORT 
= '5432'; + process.env.CUBEJS_PRE_AGGREGATIONS_DB_PORT = '5433'; + + expect(getEnv('dbPort', { dataSource: 'default', preAggregations: true })) + .toEqual(5433); + expect(getEnv('dbPort', { dataSource: 'default' })) + .toEqual(5432); + }); + + test('keyByDataSource with preAggregations flag', () => { + expect(keyByDataSource('CUBEJS_DB_HOST', 'default', true)) + .toEqual('CUBEJS_PRE_AGGREGATIONS_DB_HOST'); + expect(keyByDataSource('CUBEJS_DB_HOST', 'default', false)) + .toEqual('CUBEJS_DB_HOST'); + expect(keyByDataSource('CUBEJS_DB_HOST', 'default')) + .toEqual('CUBEJS_DB_HOST'); + }); +}); + +describe('Pre-aggregation env vars (multi datasource)', () => { + beforeEach(() => { + process.env.CUBEJS_DATASOURCES = 'default,analytics'; + }); + + afterEach(() => { + delete process.env.CUBEJS_DATASOURCES; + delete process.env.CUBEJS_DB_HOST; + delete process.env.CUBEJS_PRE_AGGREGATIONS_DB_HOST; + delete process.env.CUBEJS_DS_ANALYTICS_DB_HOST; + delete process.env.CUBEJS_DS_ANALYTICS_PRE_AGGREGATIONS_DB_HOST; + delete process.env.CUBEJS_DB_TYPE; + delete process.env.CUBEJS_DS_ANALYTICS_DB_TYPE; + delete process.env.CUBEJS_DS_ANALYTICS_PRE_AGGREGATIONS_DB_TYPE; + }); + + test('multi-datasource: PRE_AGGREGATIONS variant for named datasource', () => { + process.env.CUBEJS_DS_ANALYTICS_DB_HOST = 'analytics-host'; + process.env.CUBEJS_DS_ANALYTICS_PRE_AGGREGATIONS_DB_HOST = 'analytics-preagg-host'; + + expect(getEnv('dbHost', { dataSource: 'analytics', preAggregations: true })) + .toEqual('analytics-preagg-host'); + expect(getEnv('dbHost', { dataSource: 'analytics' })) + .toEqual('analytics-host'); + }); + + test('multi-datasource: returns undefined when PRE_AGGREGATIONS variant not set', () => { + process.env.CUBEJS_DS_ANALYTICS_DB_HOST = 'analytics-host'; + + expect(getEnv('dbHost', { dataSource: 'analytics', preAggregations: true })) + .toBeUndefined(); + }); + + test('multi-datasource: default datasource uses CUBEJS_PRE_AGGREGATIONS prefix', () => { + 
process.env.CUBEJS_DB_HOST = 'default-host'; + process.env.CUBEJS_PRE_AGGREGATIONS_DB_HOST = 'default-preagg-host'; + + expect(getEnv('dbHost', { dataSource: 'default', preAggregations: true })) + .toEqual('default-preagg-host'); + expect(getEnv('dbHost', { dataSource: 'default' })) + .toEqual('default-host'); + }); + + test('multi-datasource: different types for pre-aggregation', () => { + process.env.CUBEJS_DS_ANALYTICS_DB_TYPE = 'postgres'; + process.env.CUBEJS_DS_ANALYTICS_PRE_AGGREGATIONS_DB_TYPE = 'duckdb'; + + expect(getEnv('dbType', { dataSource: 'analytics', preAggregations: true })) + .toEqual('duckdb'); + expect(getEnv('dbType', { dataSource: 'analytics' })) + .toEqual('postgres'); + }); + + test('keyByDataSource with preAggregations for named datasource', () => { + expect(keyByDataSource('CUBEJS_DB_HOST', 'analytics', true)) + .toEqual('CUBEJS_DS_ANALYTICS_PRE_AGGREGATIONS_DB_HOST'); + expect(keyByDataSource('CUBEJS_DB_HOST', 'analytics')) + .toEqual('CUBEJS_DS_ANALYTICS_DB_HOST'); + }); +}); + +describe('hasPreAggregationsEnvVars', () => { + afterEach(() => { + delete process.env.CUBEJS_PRE_AGGREGATIONS_DB_HOST; + delete process.env.CUBEJS_DS_ANALYTICS_PRE_AGGREGATIONS_DB_HOST; + delete process.env.CUBEJS_DATASOURCES; + }); + + test('returns false when no PRE_AGGREGATIONS vars set', () => { + expect(hasPreAggregationsEnvVars('default')).toBe(false); + }); + + test('returns true when a PRE_AGGREGATIONS var is set for default', () => { + process.env.CUBEJS_PRE_AGGREGATIONS_DB_HOST = 'some-host'; + expect(hasPreAggregationsEnvVars('default')).toBe(true); + }); + + test('returns true when a PRE_AGGREGATIONS var is set for named datasource', () => { + process.env.CUBEJS_DS_ANALYTICS_PRE_AGGREGATIONS_DB_HOST = 'some-host'; + expect(hasPreAggregationsEnvVars('analytics')).toBe(true); + }); + + test('returns false for non-matching datasource', () => { + process.env.CUBEJS_DS_ANALYTICS_PRE_AGGREGATIONS_DB_HOST = 'some-host'; + 
expect(hasPreAggregationsEnvVars('default')).toBe(false); + expect(hasPreAggregationsEnvVars('other')).toBe(false); + }); +}); diff --git a/packages/cubejs-base-driver/src/BaseDriver.ts b/packages/cubejs-base-driver/src/BaseDriver.ts index 7339d0bcb560e..a9a12143f0228 100644 --- a/packages/cubejs-base-driver/src/BaseDriver.ts +++ b/packages/cubejs-base-driver/src/BaseDriver.ts @@ -265,10 +265,10 @@ export abstract class BaseDriver implements DriverInterface { return 'columns.table_name'; } - protected getSslOptions(dataSource: string): TLSConnectionOptions | undefined { + protected getSslOptions(dataSource: string, preAggregations?: boolean): TLSConnectionOptions | undefined { if ( - getEnv('dbSsl', { dataSource }) || - getEnv('dbSslRejectUnauthorized', { dataSource }) + getEnv('dbSsl', { dataSource, preAggregations }) || + getEnv('dbSslRejectUnauthorized', { dataSource, preAggregations }) ) { const sslOptions = [{ name: 'ca', @@ -337,7 +337,7 @@ export abstract class BaseDriver implements DriverInterface { {} ); - ssl.rejectUnauthorized = getEnv('dbSslRejectUnauthorized', { dataSource }); + ssl.rejectUnauthorized = getEnv('dbSslRejectUnauthorized', { dataSource, preAggregations }); return ssl; } diff --git a/packages/cubejs-bigquery-driver/src/BigQueryDriver.ts b/packages/cubejs-bigquery-driver/src/BigQueryDriver.ts index 69b8aca39761b..a97c5f9d42c0f 100644 --- a/packages/cubejs-bigquery-driver/src/BigQueryDriver.ts +++ b/packages/cubejs-bigquery-driver/src/BigQueryDriver.ts @@ -91,6 +91,11 @@ export class BigQueryDriver extends BaseDriver implements DriverInterface { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. 
*/ @@ -110,42 +115,44 @@ export class BigQueryDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.options = { scopes: [ 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/drive', ], - projectId: getEnv('bigqueryProjectId', { dataSource }), - keyFilename: getEnv('bigqueryKeyFile', { dataSource }), - credentials: getEnv('bigqueryCredentials', { dataSource }) + projectId: getEnv('bigqueryProjectId', { dataSource, preAggregations }), + keyFilename: getEnv('bigqueryKeyFile', { dataSource, preAggregations }), + credentials: getEnv('bigqueryCredentials', { dataSource, preAggregations }) ? JSON.parse( Buffer.from( - getEnv('bigqueryCredentials', { dataSource }), + getEnv('bigqueryCredentials', { dataSource, preAggregations }), 'base64', ).toString('utf8') ) : undefined, exportBucket: - getEnv('dbExportBucket', { dataSource }) || - getEnv('bigqueryExportBucket', { dataSource }), - location: getEnv('bigqueryLocation', { dataSource }), + getEnv('dbExportBucket', { dataSource, preAggregations }) || + getEnv('bigqueryExportBucket', { dataSource, preAggregations }), + location: getEnv('bigqueryLocation', { dataSource, preAggregations }), ...config, pollTimeout: ( config.pollTimeout || - getEnv('dbPollTimeout', { dataSource }) || - getEnv('dbQueryTimeout', { dataSource }) + getEnv('dbPollTimeout', { dataSource, preAggregations }) || + getEnv('dbQueryTimeout', { dataSource, preAggregations }) ) * 1000, pollMaxInterval: ( config.pollMaxInterval || - getEnv('dbPollMaxInterval', { dataSource }) + getEnv('dbPollMaxInterval', { dataSource, preAggregations }) ) * 1000, - exportBucketCsvEscapeSymbol: getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }), + exportBucketCsvEscapeSymbol: getEnv('dbExportBucketCsvEscapeSymbol', { dataSource, preAggregations }), userAgent: `CubeDev_Cube/${version}`, }; getEnv('dbExportBucketType', 
{ dataSource, + preAggregations, supported: ['gcp'], }); diff --git a/packages/cubejs-clickhouse-driver/src/ClickHouseDriver.ts b/packages/cubejs-clickhouse-driver/src/ClickHouseDriver.ts index 0e9ac262185a4..831f96a2219d4 100644 --- a/packages/cubejs-clickhouse-driver/src/ClickHouseDriver.ts +++ b/packages/cubejs-clickhouse-driver/src/ClickHouseDriver.ts @@ -91,6 +91,11 @@ export interface ClickHouseDriverOptions { * request before determining it as not valid. Default - 10000 ms. */ testConnectionTimeout?: number, + + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, } interface ClickhouseDriverExportRequiredAWS { @@ -145,20 +150,21 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface { }); const dataSource = config.dataSource ?? assertDataSource('default'); - const host = config.host ?? getEnv('dbHost', { dataSource }); - const port = config.port ?? getEnv('dbPort', { dataSource }) ?? 8123; - const protocol = config.protocol ?? (getEnv('dbSsl', { dataSource }) ? 'https:' : 'http:'); + const preAggregations = config.preAggregations || false; + const host = config.host ?? getEnv('dbHost', { dataSource, preAggregations }); + const port = config.port ?? getEnv('dbPort', { dataSource, preAggregations }) ?? 8123; + const protocol = config.protocol ?? (getEnv('dbSsl', { dataSource, preAggregations }) ? 'https:' : 'http:'); const url = `${protocol}//${host}:${port}`; - const username = config.username ?? getEnv('dbUser', { dataSource }); - const password = config.password ?? getEnv('dbPass', { dataSource }); - const database = config.database ?? (getEnv('dbName', { dataSource }) as string) ?? 'default'; + const username = config.username ?? getEnv('dbUser', { dataSource, preAggregations }); + const password = config.password ?? getEnv('dbPass', { dataSource, preAggregations }); + const database = config.database ?? (getEnv('dbName', { dataSource, preAggregations }) as string) ?? 
'default'; // TODO this is a bit inconsistent with readOnly - this.readOnlyMode = getEnv('clickhouseReadOnly', { dataSource }); + this.readOnlyMode = getEnv('clickhouseReadOnly', { dataSource, preAggregations }); // Expect that getEnv('dbQueryTimeout') will always return a value - const requestTimeoutEnv: number = getEnv('dbQueryTimeout', { dataSource }) * 1000; + const requestTimeoutEnv: number = getEnv('dbQueryTimeout', { dataSource, preAggregations }) * 1000; const requestTimeout = config.requestTimeout ?? requestTimeoutEnv; this.config = { @@ -166,13 +172,13 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface { username, password, database, - exportBucket: this.getExportBucket(dataSource), + exportBucket: this.getExportBucket(dataSource, preAggregations), readOnly: !!config.readOnly, requestTimeout, compression: { // Response compression can't be enabled for a user with readonly=1, as ClickHouse will not allow settings modifications for such user. - response: this.readOnlyMode ? false : getEnv('clickhouseCompression', { dataSource }), - request: getEnv('clickhouseCompression', { dataSource }), + response: this.readOnlyMode ? false : getEnv('clickhouseCompression', { dataSource, preAggregations }), + request: getEnv('clickhouseCompression', { dataSource, preAggregations }), }, clickhouseSettings: { /// Default Node.js client has a limit for the max size of HTTP headers. In practise, such headers can be extremely large @@ -185,7 +191,7 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface { }, }; - const maxPoolSize = config.maxPoolSize ?? getEnv('dbMaxPoolSize', { dataSource }) ?? 8; + const maxPoolSize = config.maxPoolSize ?? getEnv('dbMaxPoolSize', { dataSource, preAggregations }) ?? 
8; this.client = this.createClient(maxPoolSize); } @@ -504,20 +510,22 @@ export class ClickHouseDriver extends BaseDriver implements DriverInterface { protected getExportBucket( dataSource: string, + preAggregations?: boolean, ): ClickhouseDriverExportAWS | null { const requiredExportBucket: ClickhouseDriverExportRequiredAWS = { bucketType: getEnv('dbExportBucketType', { supported: SUPPORTED_BUCKET_TYPES, dataSource, + preAggregations, }), - bucketName: getEnv('dbExportBucket', { dataSource }), - region: getEnv('dbExportBucketAwsRegion', { dataSource }), + bucketName: getEnv('dbExportBucket', { dataSource, preAggregations }), + region: getEnv('dbExportBucketAwsRegion', { dataSource, preAggregations }), }; const exportBucket: ClickhouseDriverExportAWS = { ...requiredExportBucket, - keyId: getEnv('dbExportBucketAwsKey', { dataSource }), - secretKey: getEnv('dbExportBucketAwsSecret', { dataSource }), + keyId: getEnv('dbExportBucketAwsKey', { dataSource, preAggregations }), + secretKey: getEnv('dbExportBucketAwsSecret', { dataSource, preAggregations }), }; if (exportBucket.bucketType) { diff --git a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts index 0ff044f80724a..c1f5da3c70a30 100644 --- a/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts +++ b/packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts @@ -187,6 +187,11 @@ export class DatabricksDriver extends JDBCDriver { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. 
*/ @@ -202,12 +207,13 @@ export class DatabricksDriver extends JDBCDriver { const dataSource = conf.dataSource || assertDataSource('default'); + const preAggregations = conf.preAggregations || false; let showSparkProtocolWarn = false; let url: string = conf?.url || - getEnv('databricksUrl', { dataSource }) || - getEnv('jdbcUrl', { dataSource }); + getEnv('databricksUrl', { dataSource, preAggregations }) || + getEnv('jdbcUrl', { dataSource, preAggregations }); if (url.indexOf('jdbc:spark://') !== -1) { showSparkProtocolWarn = true; url = url.replace('jdbc:spark://', 'jdbc:databricks://'); @@ -215,10 +221,10 @@ export class DatabricksDriver extends JDBCDriver { const [uid, pwd, cleanedUrl] = extractAndRemoveUidPwdFromJdbcUrl(url); const passwd = conf?.token || - getEnv('databricksToken', { dataSource }) || + getEnv('databricksToken', { dataSource, preAggregations }) || pwd; - const oauthClientId = conf?.oauthClientId || getEnv('databricksOAuthClientId', { dataSource }); - const oauthClientSecret = conf?.oauthClientSecret || getEnv('databricksOAuthClientSecret', { dataSource }); + const oauthClientId = conf?.oauthClientId || getEnv('databricksOAuthClientId', { dataSource, preAggregations }); + const oauthClientSecret = conf?.oauthClientSecret || getEnv('databricksOAuthClientSecret', { dataSource, preAggregations }); if (oauthClientId && !oauthClientSecret) { throw new Error('Invalid credentials: No OAuth Client Secret provided'); @@ -260,52 +266,52 @@ export class DatabricksDriver extends JDBCDriver { }, catalog: conf?.catalog || - getEnv('databricksCatalog', { dataSource }), - database: getEnv('dbName', { required: false, dataSource }), + getEnv('databricksCatalog', { dataSource, preAggregations }), + database: getEnv('dbName', { required: false, dataSource, preAggregations }), // common export bucket config bucketType: conf?.bucketType || - getEnv('dbExportBucketType', { supported: SUPPORTED_BUCKET_TYPES, dataSource }), + getEnv('dbExportBucketType', { supported: 
SUPPORTED_BUCKET_TYPES, dataSource, preAggregations }), exportBucket: conf?.exportBucket || - getEnv('dbExportBucket', { dataSource }), + getEnv('dbExportBucket', { dataSource, preAggregations }), exportBucketMountDir: conf?.exportBucketMountDir || - getEnv('dbExportBucketMountDir', { dataSource }), + getEnv('dbExportBucketMountDir', { dataSource, preAggregations }), pollInterval: ( conf?.pollInterval || - getEnv('dbPollMaxInterval', { dataSource }) + getEnv('dbPollMaxInterval', { dataSource, preAggregations }) ) * 1000, // AWS export bucket config awsKey: conf?.awsKey || - getEnv('dbExportBucketAwsKey', { dataSource }), + getEnv('dbExportBucketAwsKey', { dataSource, preAggregations }), awsSecret: conf?.awsSecret || - getEnv('dbExportBucketAwsSecret', { dataSource }), + getEnv('dbExportBucketAwsSecret', { dataSource, preAggregations }), awsRegion: conf?.awsRegion || - getEnv('dbExportBucketAwsRegion', { dataSource }), + getEnv('dbExportBucketAwsRegion', { dataSource, preAggregations }), // Azure export bucket azureKey: conf?.azureKey || - getEnv('dbExportBucketAzureKey', { dataSource }), + getEnv('dbExportBucketAzureKey', { dataSource, preAggregations }), exportBucketCsvEscapeSymbol: - getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }), + getEnv('dbExportBucketCsvEscapeSymbol', { dataSource, preAggregations }), // Azure service principal azureTenantId: conf?.azureTenantId || - getEnv('dbExportBucketAzureTenantId', { dataSource }), + getEnv('dbExportBucketAzureTenantId', { dataSource, preAggregations }), azureClientId: conf?.azureClientId || - getEnv('dbExportBucketAzureClientId', { dataSource }), + getEnv('dbExportBucketAzureClientId', { dataSource, preAggregations }), azureClientSecret: conf?.azureClientSecret || - getEnv('dbExportBucketAzureClientSecret', { dataSource }), + getEnv('dbExportBucketAzureClientSecret', { dataSource, preAggregations }), // GCS credentials gcsCredentials: conf?.gcsCredentials || - getEnv('dbExportGCSCredentials', { dataSource }), 
+ getEnv('dbExportGCSCredentials', { dataSource, preAggregations }), }; if (config.readOnly === undefined) { // we can set readonly to true if there is no bucket config provided diff --git a/packages/cubejs-dremio-driver/driver/DremioDriver.js b/packages/cubejs-dremio-driver/driver/DremioDriver.js index 300f9dc795d41..2db10384dd684 100644 --- a/packages/cubejs-dremio-driver/driver/DremioDriver.js +++ b/packages/cubejs-dremio-driver/driver/DremioDriver.js @@ -47,45 +47,46 @@ class DremioDriver extends BaseDriver { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.config = { dbUrl: config.dbUrl || - getEnv('dbUrl', { dataSource }) || + getEnv('dbUrl', { dataSource, preAggregations }) || '', dremioAuthToken: config.dremioAuthToken || - getEnv('dremioAuthToken', { dataSource }) || + getEnv('dremioAuthToken', { dataSource, preAggregations }) || '', host: config.host || - getEnv('dbHost', { dataSource }) || + getEnv('dbHost', { dataSource, preAggregations }) || 'localhost', port: config.port || - getEnv('dbPort', { dataSource }) || + getEnv('dbPort', { dataSource, preAggregations }) || 9047, user: config.user || - getEnv('dbUser', { dataSource }), + getEnv('dbUser', { dataSource, preAggregations }), password: config.password || - getEnv('dbPass', { dataSource }), + getEnv('dbPass', { dataSource, preAggregations }), database: config.database || - getEnv('dbName', { dataSource }), + getEnv('dbName', { dataSource, preAggregations }), ssl: config.ssl || - getEnv('dbSsl', { dataSource }), + getEnv('dbSsl', { dataSource, preAggregations }), ...config, pollTimeout: ( config.pollTimeout || - getEnv('dbPollTimeout', { dataSource }) || - getEnv('dbQueryTimeout', { dataSource }) + getEnv('dbPollTimeout', { dataSource, preAggregations }) || + getEnv('dbQueryTimeout', { dataSource, preAggregations }) ) * 1000, pollMaxInterval: ( config.pollMaxInterval || - getEnv('dbPollMaxInterval', { dataSource }) + 
getEnv('dbPollMaxInterval', { dataSource, preAggregations }) ) * 1000, }; diff --git a/packages/cubejs-druid-driver/src/DruidDriver.ts b/packages/cubejs-druid-driver/src/DruidDriver.ts index 1fa661b4a50ae..028f27db9f376 100644 --- a/packages/cubejs-druid-driver/src/DruidDriver.ts +++ b/packages/cubejs-druid-driver/src/DruidDriver.ts @@ -50,6 +50,11 @@ export class DruidDriver extends BaseDriver { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. */ @@ -69,14 +74,15 @@ export class DruidDriver extends BaseDriver { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; - let url = config.url || getEnv('dbUrl', { dataSource }); + let url = config.url || getEnv('dbUrl', { dataSource, preAggregations }); if (!url) { - const host = getEnv('dbHost', { dataSource }); - const port = getEnv('dbPort', { dataSource }); + const host = getEnv('dbHost', { dataSource, preAggregations }); + const port = getEnv('dbPort', { dataSource, preAggregations }); if (host && port) { - const protocol = getEnv('dbSsl', { dataSource }) + const protocol = getEnv('dbSsl', { dataSource, preAggregations }) ? 
'https' : 'http'; url = `${protocol}://${host}:${port}`; @@ -88,13 +94,13 @@ export class DruidDriver extends BaseDriver { url, user: config.user || - getEnv('dbUser', { dataSource }), + getEnv('dbUser', { dataSource, preAggregations }), password: config.password || - getEnv('dbPass', { dataSource }), + getEnv('dbPass', { dataSource, preAggregations }), database: config.database || - getEnv('dbName', { dataSource }) || + getEnv('dbName', { dataSource, preAggregations }) || 'default', ...config, }; diff --git a/packages/cubejs-duckdb-driver/src/DuckDBDriver.ts b/packages/cubejs-duckdb-driver/src/DuckDBDriver.ts index 451ccf3d75811..7819873cf5020 100644 --- a/packages/cubejs-duckdb-driver/src/DuckDBDriver.ts +++ b/packages/cubejs-duckdb-driver/src/DuckDBDriver.ts @@ -25,6 +25,7 @@ export type DuckDBDriverConfiguration = { motherDuckToken?: string, schema?: string, duckdbS3UseCredentialChain?: boolean, + preAggregations?: boolean, }; type InitPromise = { diff --git a/packages/cubejs-elasticsearch-driver/driver/ElasticSearchDriver.js b/packages/cubejs-elasticsearch-driver/driver/ElasticSearchDriver.js index ecc0fe8135fc9..cd0e06e9b801c 100644 --- a/packages/cubejs-elasticsearch-driver/driver/ElasticSearchDriver.js +++ b/packages/cubejs-elasticsearch-driver/driver/ElasticSearchDriver.js @@ -35,18 +35,19 @@ class ElasticSearchDriver extends BaseDriver { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; const auth = { - username: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), + username: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), }; if ( - getEnv('elasticApiId', { dataSource }) || - getEnv('elasticApiKey', { dataSource }) + getEnv('elasticApiId', { dataSource, preAggregations }) || + getEnv('elasticApiKey', { dataSource, preAggregations }) ) { auth.apiKey = { - id: getEnv('elasticApiId', { 
dataSource }), - api_key: getEnv('elasticApiKey', { dataSource }), + id: getEnv('elasticApiId', { dataSource, preAggregations }), + api_key: getEnv('elasticApiKey', { dataSource, preAggregations }), }; } @@ -55,14 +56,14 @@ class ElasticSearchDriver extends BaseDriver { // their respective documentation. this.config = { auth, - url: getEnv('dbUrl', { dataSource }), - ssl: this.getSslOptions(dataSource), + url: getEnv('dbUrl', { dataSource, preAggregations }), + ssl: this.getSslOptions(dataSource, preAggregations), openDistro: - (getEnv('elasticOpenDistro', { dataSource }) || 'false') + (getEnv('elasticOpenDistro', { dataSource, preAggregations }) || 'false') .toLowerCase() === 'true' || - getEnv('dbType', { dataSource }) === 'odelasticsearch', + getEnv('dbType', { dataSource, preAggregations }) === 'odelasticsearch', queryFormat: - getEnv('elasticQueryFormat', { dataSource }) || 'jdbc', + getEnv('elasticQueryFormat', { dataSource, preAggregations }) || 'jdbc', ...config, }; diff --git a/packages/cubejs-firebolt-driver/src/FireboltDriver.ts b/packages/cubejs-firebolt-driver/src/FireboltDriver.ts index fb29bb51c82ce..649f724a658a7 100644 --- a/packages/cubejs-firebolt-driver/src/FireboltDriver.ts +++ b/packages/cubejs-firebolt-driver/src/FireboltDriver.ts @@ -67,6 +67,11 @@ export class FireboltDriver extends BaseDriver implements DriverInterface { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. */ @@ -85,25 +90,26 @@ export class FireboltDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; - const username = getEnv('dbUser', { dataSource }); + const username = getEnv('dbUser', { dataSource, preAggregations }); const auth = username.includes('@') - ? 
{ username, password: getEnv('dbPass', { dataSource }) } - : { client_id: username, client_secret: getEnv('dbPass', { dataSource }) }; + ? { username, password: getEnv('dbPass', { dataSource, preAggregations }) } + : { client_id: username, client_secret: getEnv('dbPass', { dataSource, preAggregations }) }; this.config = { readOnly: true, requestTimeout: getEnv('dbQueryTimeout') * 1000, apiEndpoint: - getEnv('fireboltApiEndpoint', { dataSource }) || 'api.app.firebolt.io', + getEnv('fireboltApiEndpoint', { dataSource, preAggregations }) || 'api.app.firebolt.io', ...config, connection: { auth, - database: getEnv('dbName', { dataSource }), - account: getEnv('fireboltAccount', { dataSource }), - engineName: getEnv('fireboltEngineName', { dataSource }), + database: getEnv('dbName', { dataSource, preAggregations }), + account: getEnv('fireboltAccount', { dataSource, preAggregations }), + engineName: getEnv('fireboltEngineName', { dataSource, preAggregations }), // engineEndpoint was deprecated in favor of engineName + account - engineEndpoint: getEnv('fireboltEngineEndpoint', { dataSource }), + engineEndpoint: getEnv('fireboltEngineEndpoint', { dataSource, preAggregations }), additionalParameters: { userClients: [{ name: 'CubeDev+Cube', diff --git a/packages/cubejs-hive-driver/src/HiveDriver.js b/packages/cubejs-hive-driver/src/HiveDriver.js index 63908361e9cce..32c7355fa5ff5 100644 --- a/packages/cubejs-hive-driver/src/HiveDriver.js +++ b/packages/cubejs-hive-driver/src/HiveDriver.js @@ -61,21 +61,22 @@ class HiveDriver extends BaseDriver { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.config = { auth: 'PLAIN', - host: getEnv('dbHost', { dataSource }), - port: getEnv('dbPort', { dataSource }), - dbName: getEnv('dbName', { dataSource }) || 'default', + host: getEnv('dbHost', { dataSource, preAggregations }), + port: getEnv('dbPort', { dataSource, preAggregations }), + dbName: 
getEnv('dbName', { dataSource, preAggregations }) || 'default', timeout: 10000, - username: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), - hiveType: getEnv('hiveType', { dataSource }) === 'CDH' + username: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), + hiveType: getEnv('hiveType', { dataSource, preAggregations }) === 'CDH' ? HS2Util.HIVE_TYPE.CDH : HS2Util.HIVE_TYPE.HIVE, - hiveVer: getEnv('hiveVer', { dataSource }) || '2.1.1', - thriftVer: getEnv('hiveThriftVer', { dataSource }) || '0.9.3', - cdhVer: getEnv('hiveCdhVer', { dataSource }), + hiveVer: getEnv('hiveVer', { dataSource, preAggregations }) || '2.1.1', + thriftVer: getEnv('hiveThriftVer', { dataSource, preAggregations }) || '0.9.3', + cdhVer: getEnv('hiveCdhVer', { dataSource, preAggregations }), authZid: 'cube.js', ...config }; @@ -124,7 +125,7 @@ class HiveDriver extends BaseDriver { min: 0, max: config.maxPoolSize || - getEnv('dbMaxPoolSize', { dataSource }) || + getEnv('dbMaxPoolSize', { dataSource, preAggregations }) || 8, evictionRunIntervalMillis: 10000, softIdleTimeoutMillis: 30000, diff --git a/packages/cubejs-jdbc-driver/src/JDBCDriver.ts b/packages/cubejs-jdbc-driver/src/JDBCDriver.ts index 115118b4ada44..8d40b87353eb0 100644 --- a/packages/cubejs-jdbc-driver/src/JDBCDriver.ts +++ b/packages/cubejs-jdbc-driver/src/JDBCDriver.ts @@ -86,6 +86,11 @@ export class JDBCDriver extends BaseDriver { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. 
*/ @@ -105,20 +110,21 @@ export class JDBCDriver extends BaseDriver { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; const { poolOptions, ...dbOptions } = config; const dbTypeDescription = JDBCDriver.dbTypeDescription( - (config.dbType || getEnv('dbType', { dataSource })), + (config.dbType || getEnv('dbType', { dataSource, preAggregations })), ); this.config = { - dbType: getEnv('dbType', { dataSource }), + dbType: getEnv('dbType', { dataSource, preAggregations }), url: - getEnv('jdbcUrl', { dataSource }) || + getEnv('jdbcUrl', { dataSource, preAggregations }) || dbTypeDescription && dbTypeDescription.jdbcUrl(), drivername: - getEnv('jdbcDriver', { dataSource }) || + getEnv('jdbcDriver', { dataSource, preAggregations }) || dbTypeDescription && dbTypeDescription.driverClass, properties: dbTypeDescription && dbTypeDescription.properties, ...dbOptions @@ -171,7 +177,7 @@ export class JDBCDriver extends BaseDriver { ) }, { min: 0, - max: config.maxPoolSize || getEnv('dbMaxPoolSize', { dataSource }) || 8, + max: config.maxPoolSize || getEnv('dbMaxPoolSize', { dataSource, preAggregations }) || 8, evictionRunIntervalMillis: 10000, softIdleTimeoutMillis: 30000, idleTimeoutMillis: 30000, diff --git a/packages/cubejs-ksql-driver/src/KsqlDriver.ts b/packages/cubejs-ksql-driver/src/KsqlDriver.ts index 0af5c0f0d4089..f4feab1a8a63c 100644 --- a/packages/cubejs-ksql-driver/src/KsqlDriver.ts +++ b/packages/cubejs-ksql-driver/src/KsqlDriver.ts @@ -97,6 +97,11 @@ export class KsqlDriver extends BaseDriver implements DriverInterface { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. 
*/ @@ -116,15 +121,16 @@ export class KsqlDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.config = { - url: getEnv('dbUrl', { dataSource }), - username: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), - kafkaHost: getEnv('dbKafkaHost', { dataSource }), - kafkaUser: getEnv('dbKafkaUser', { dataSource }), - kafkaPassword: getEnv('dbKafkaPass', { dataSource }), - kafkaUseSsl: getEnv('dbKafkaUseSsl', { dataSource }), + url: getEnv('dbUrl', { dataSource, preAggregations }), + username: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), + kafkaHost: getEnv('dbKafkaHost', { dataSource, preAggregations }), + kafkaUser: getEnv('dbKafkaUser', { dataSource, preAggregations }), + kafkaPassword: getEnv('dbKafkaPass', { dataSource, preAggregations }), + kafkaUseSsl: getEnv('dbKafkaUseSsl', { dataSource, preAggregations }), ...config, }; diff --git a/packages/cubejs-mongobi-driver/src/MongoBIDriver.ts b/packages/cubejs-mongobi-driver/src/MongoBIDriver.ts index b1a45c5c82c88..1c35965c8f75e 100644 --- a/packages/cubejs-mongobi-driver/src/MongoBIDriver.ts +++ b/packages/cubejs-mongobi-driver/src/MongoBIDriver.ts @@ -49,6 +49,11 @@ export class MongoBIDriver extends BaseDriver implements DriverInterface { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. 
*/ @@ -68,16 +73,17 @@ export class MongoBIDriver extends BaseDriver implements DriverInterface { // eslint-disable-next-line @typescript-eslint/no-unused-vars const { dataSource: configDataSource, maxPoolSize, testConnectionTimeout, ...mongoBIDriverConfiguration } = config; const dataSource = configDataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.config = { - host: getEnv('dbHost', { dataSource }), - database: getEnv('dbName', { dataSource }), - port: getEnv('dbPort', { dataSource }), - user: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), + host: getEnv('dbHost', { dataSource, preAggregations }), + database: getEnv('dbName', { dataSource, preAggregations }), + port: getEnv('dbPort', { dataSource, preAggregations }), + user: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), // mysql2 uses own typings for ssl property, which is not correct // Types of property 'pfx' are incompatible. 
Skipping validation with any cast - ssl: this.getSslOptions(dataSource) as any, + ssl: this.getSslOptions(dataSource, preAggregations) as any, typeCast: (field: Field, next) => { if (field.type === 'DATETIME') { // Example value 1998-08-02 00:00:00 @@ -125,7 +131,7 @@ export class MongoBIDriver extends BaseDriver implements DriverInterface { min: 0, max: config.maxPoolSize || - getEnv('dbMaxPoolSize', { dataSource }) || + getEnv('dbMaxPoolSize', { dataSource, preAggregations }) || 8, evictionRunIntervalMillis: 10000, softIdleTimeoutMillis: 30000, diff --git a/packages/cubejs-mssql-driver/src/MSSqlDriver.ts b/packages/cubejs-mssql-driver/src/MSSqlDriver.ts index f9b750ab50283..d24fdc498e4c9 100644 --- a/packages/cubejs-mssql-driver/src/MSSqlDriver.ts +++ b/packages/cubejs-mssql-driver/src/MSSqlDriver.ts @@ -85,6 +85,11 @@ export class MSSqlDriver extends BaseDriver implements DriverInterface { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. 
*/ @@ -109,27 +114,28 @@ export class MSSqlDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.config = { readOnly: true, - server: getEnv('dbHost', { dataSource }), - database: getEnv('dbName', { dataSource }), - port: getEnv('dbPort', { dataSource }), - user: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), - domain: getEnv('dbDomain', { dataSource }), + server: getEnv('dbHost', { dataSource, preAggregations }), + database: getEnv('dbName', { dataSource, preAggregations }), + port: getEnv('dbPort', { dataSource, preAggregations }), + user: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), + domain: getEnv('dbDomain', { dataSource, preAggregations }), requestTimeout: getEnv('dbQueryTimeout') * 1000, options: { - encrypt: getEnv('dbSsl', { dataSource }), + encrypt: getEnv('dbSsl', { dataSource, preAggregations }), useUTC: true }, pool: { max: config.maxPoolSize || - getEnv('dbMaxPoolSize', { dataSource }) || + getEnv('dbMaxPoolSize', { dataSource, preAggregations }) || 8, min: config.minPoolSize || - getEnv('dbMinPoolSize', { dataSource }) || + getEnv('dbMinPoolSize', { dataSource, preAggregations }) || 0, idleTimeoutMillis: 30 * 1000, acquireTimeoutMillis: 20 * 1000 diff --git a/packages/cubejs-mysql-aurora-serverless-driver/driver/AuroraServerlessMySqlDriver.js b/packages/cubejs-mysql-aurora-serverless-driver/driver/AuroraServerlessMySqlDriver.js index 1ea5bcd1af5cf..18703cb16b210 100644 --- a/packages/cubejs-mysql-aurora-serverless-driver/driver/AuroraServerlessMySqlDriver.js +++ b/packages/cubejs-mysql-aurora-serverless-driver/driver/AuroraServerlessMySqlDriver.js @@ -40,20 +40,21 @@ class AuroraServerlessMySqlDriver extends BaseDriver { const dataSource = config.dataSource || assertDataSource('default'); - + const preAggregations = 
config.preAggregations || false; + this.config = { secretArn: config.secretArn || - getEnv('auroraSecretArn', { dataSource }), + getEnv('auroraSecretArn', { dataSource, preAggregations }), resourceArn: config.resourceArn || // TODO (buntarb): this looks like a typo. Deprecate? config.resourceArm || - getEnv('auroraClusterArn', { dataSource }), + getEnv('auroraClusterArn', { dataSource, preAggregations }), database: config.database || - getEnv('dbName', { dataSource }) || - getEnv('dbDatabase', { dataSource }), + getEnv('dbName', { dataSource, preAggregations }) || + getEnv('dbDatabase', { dataSource, preAggregations }), ...config }; diff --git a/packages/cubejs-mysql-driver/src/MySqlDriver.ts b/packages/cubejs-mysql-driver/src/MySqlDriver.ts index b8ee61fedafb8..aadca93a2cca2 100644 --- a/packages/cubejs-mysql-driver/src/MySqlDriver.ts +++ b/packages/cubejs-mysql-driver/src/MySqlDriver.ts @@ -104,6 +104,11 @@ export class MySqlDriver extends BaseDriver implements DriverInterface { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. 
*/ @@ -123,17 +128,18 @@ export class MySqlDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; const { pool, ...restConfig } = config; this.config = { - host: getEnv('dbHost', { dataSource }), - database: getEnv('dbName', { dataSource }), - port: getEnv('dbPort', { dataSource }), - user: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), - socketPath: getEnv('dbSocketPath', { dataSource }), + host: getEnv('dbHost', { dataSource, preAggregations }), + database: getEnv('dbName', { dataSource, preAggregations }), + port: getEnv('dbPort', { dataSource, preAggregations }), + user: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), + socketPath: getEnv('dbSocketPath', { dataSource, preAggregations }), timezone: 'Z', - ssl: this.getSslOptions(dataSource), + ssl: this.getSslOptions(dataSource, preAggregations), dateStrings: true, readOnly: true, ...restConfig, @@ -168,7 +174,7 @@ export class MySqlDriver extends BaseDriver implements DriverInterface { min: 0, max: config.maxPoolSize || - getEnv('dbMaxPoolSize', { dataSource }) || + getEnv('dbMaxPoolSize', { dataSource, preAggregations }) || 8, evictionRunIntervalMillis: 10000, softIdleTimeoutMillis: 30000, diff --git a/packages/cubejs-oracle-driver/driver/OracleDriver.js b/packages/cubejs-oracle-driver/driver/OracleDriver.js index bd3c36a34d2e0..232282ac54ca9 100644 --- a/packages/cubejs-oracle-driver/driver/OracleDriver.js +++ b/packages/cubejs-oracle-driver/driver/OracleDriver.js @@ -65,6 +65,7 @@ class OracleDriver extends BaseDriver { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.db = oracledb; this.db.outFormat = this.db.OBJECT; @@ -72,15 +73,15 @@ class OracleDriver extends BaseDriver { this.db.maxRows = 100000; 
this.db.prefetchRows = 500; this.config = { - user: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), - db: getEnv('dbName', { dataSource }), - host: getEnv('dbHost', { dataSource }), - port: getEnv('dbPort', { dataSource }) || 1521, + user: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), + db: getEnv('dbName', { dataSource, preAggregations }), + host: getEnv('dbHost', { dataSource, preAggregations }), + port: getEnv('dbPort', { dataSource, preAggregations }) || 1521, poolMin: 0, poolMax: config.maxPoolSize || - getEnv('dbMaxPoolSize', { dataSource }) || + getEnv('dbMaxPoolSize', { dataSource, preAggregations }) || 50, ...config }; diff --git a/packages/cubejs-pinot-driver/src/PinotDriver.ts b/packages/cubejs-pinot-driver/src/PinotDriver.ts index fa7ed2e3d7c9b..e5016cd02f32a 100644 --- a/packages/cubejs-pinot-driver/src/PinotDriver.ts +++ b/packages/cubejs-pinot-driver/src/PinotDriver.ts @@ -34,6 +34,7 @@ export type PinotDriverConfiguration = { dataSource?: string; queryTimeout?: number; nullHandling?: boolean; + preAggregations?: boolean; }; type AuthorizationHeaders = { @@ -95,22 +96,23 @@ export class PinotDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.config = { - host: getEnv('dbHost', { dataSource }), - port: getEnv('dbPort', { dataSource }), - user: getEnv('dbUser', { dataSource }), - database: getEnv('dbName', { dataSource }), - basicAuth: getEnv('dbPass', { dataSource }) + host: getEnv('dbHost', { dataSource, preAggregations }), + port: getEnv('dbPort', { dataSource, preAggregations }), + user: getEnv('dbUser', { dataSource, preAggregations }), + database: getEnv('dbName', { dataSource, preAggregations }), + basicAuth: getEnv('dbPass', { dataSource, preAggregations }) ? 
{ - user: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), + user: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), } : undefined, - authToken: getEnv('pinotAuthToken', { dataSource }), - ssl: this.getSslOptions(dataSource), - nullHandling: getEnv('pinotNullHandling', { dataSource }), - queryTimeout: getEnv('dbQueryTimeout', { dataSource }), + authToken: getEnv('pinotAuthToken', { dataSource, preAggregations }), + ssl: this.getSslOptions(dataSource, preAggregations), + nullHandling: getEnv('pinotNullHandling', { dataSource, preAggregations }), + queryTimeout: getEnv('dbQueryTimeout', { dataSource, preAggregations }), ...config }; diff --git a/packages/cubejs-postgres-driver/src/PostgresDriver.ts b/packages/cubejs-postgres-driver/src/PostgresDriver.ts index 95ad9ec9896b5..2d1d55e364ea7 100644 --- a/packages/cubejs-postgres-driver/src/PostgresDriver.ts +++ b/packages/cubejs-postgres-driver/src/PostgresDriver.ts @@ -97,6 +97,11 @@ export class PostgresDriver[db] pool. 
*/ @@ -121,14 +126,15 @@ export class PostgresDriver this.databasePoolError(err)); this.config = >{ - ...this.getInitialConfiguration(dataSource), - executionTimeout: getEnv('dbQueryTimeout', { dataSource }), - exportBucketCsvEscapeSymbol: getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }), + ...this.getInitialConfiguration(dataSource, preAggregations), + executionTimeout: getEnv('dbQueryTimeout', { dataSource, preAggregations }), + exportBucketCsvEscapeSymbol: getEnv('dbExportBucketCsvEscapeSymbol', { dataSource, preAggregations }), ...config, }; this.enabled = true; @@ -221,6 +227,8 @@ export class PostgresDriver { return { readOnly: true, diff --git a/packages/cubejs-prestodb-driver/src/PrestoDriver.ts b/packages/cubejs-prestodb-driver/src/PrestoDriver.ts index 925e0b7e015e8..f8902904e716e 100644 --- a/packages/cubejs-prestodb-driver/src/PrestoDriver.ts +++ b/packages/cubejs-prestodb-driver/src/PrestoDriver.ts @@ -52,6 +52,7 @@ export type PrestoDriverConfiguration = PrestoDriverExportBucket & { ssl?: string | TLSConnectionOptions; dataSource?: string; queryTimeout?: number; + preAggregations?: boolean; }; const SUPPORTED_BUCKET_TYPES = ['gcs', 's3']; @@ -83,37 +84,38 @@ export class PrestoDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; - const dbUser = getEnv('dbUser', { dataSource }); - const dbPassword = getEnv('dbPass', { dataSource }); - const authToken = getEnv('prestoAuthToken', { dataSource }); + const dbUser = getEnv('dbUser', { dataSource, preAggregations }); + const dbPassword = getEnv('dbPass', { dataSource, preAggregations }); + const authToken = getEnv('prestoAuthToken', { dataSource, preAggregations }); if (authToken && dbPassword) { throw new Error('Both user/password and auth token are set. 
Please remove password or token.'); } - this.useSelectTestConnection = getEnv('dbUseSelectTestConnection', { dataSource }); + this.useSelectTestConnection = getEnv('dbUseSelectTestConnection', { dataSource, preAggregations }); this.config = { - host: getEnv('dbHost', { dataSource }), - port: getEnv('dbPort', { dataSource }), + host: getEnv('dbHost', { dataSource, preAggregations }), + port: getEnv('dbPort', { dataSource, preAggregations }), catalog: - getEnv('prestoCatalog', { dataSource }) || - getEnv('dbCatalog', { dataSource }), + getEnv('prestoCatalog', { dataSource, preAggregations }) || + getEnv('dbCatalog', { dataSource, preAggregations }), schema: - getEnv('dbName', { dataSource }) || - getEnv('dbSchema', { dataSource }), + getEnv('dbName', { dataSource, preAggregations }) || + getEnv('dbSchema', { dataSource, preAggregations }), user: dbUser, ...(authToken ? { custom_auth: `Bearer ${authToken}` } : {}), ...(dbPassword ? { basic_auth: { user: dbUser, password: dbPassword } } : {}), - ssl: this.getSslOptions(dataSource), - bucketType: getEnv('dbExportBucketType', { supported: SUPPORTED_BUCKET_TYPES, dataSource }), - exportBucket: getEnv('dbExportBucket', { dataSource }), - accessKeyId: getEnv('dbExportBucketAwsKey', { dataSource }), - secretAccessKey: getEnv('dbExportBucketAwsSecret', { dataSource }), - exportBucketRegion: getEnv('dbExportBucketAwsRegion', { dataSource }), - credentials: getEnv('dbExportGCSCredentials', { dataSource }), - queryTimeout: getEnv('dbQueryTimeout', { dataSource }), + ssl: this.getSslOptions(dataSource, preAggregations), + bucketType: getEnv('dbExportBucketType', { supported: SUPPORTED_BUCKET_TYPES, dataSource, preAggregations }), + exportBucket: getEnv('dbExportBucket', { dataSource, preAggregations }), + accessKeyId: getEnv('dbExportBucketAwsKey', { dataSource, preAggregations }), + secretAccessKey: getEnv('dbExportBucketAwsSecret', { dataSource, preAggregations }), + exportBucketRegion: getEnv('dbExportBucketAwsRegion', { 
dataSource, preAggregations }), + credentials: getEnv('dbExportGCSCredentials', { dataSource, preAggregations }), + queryTimeout: getEnv('dbQueryTimeout', { dataSource, preAggregations }), ...config }; this.catalog = this.config.catalog; diff --git a/packages/cubejs-query-orchestrator/src/orchestrator/DriverFactory.ts b/packages/cubejs-query-orchestrator/src/orchestrator/DriverFactory.ts index d43a8ee92575a..6344f399af414 100644 --- a/packages/cubejs-query-orchestrator/src/orchestrator/DriverFactory.ts +++ b/packages/cubejs-query-orchestrator/src/orchestrator/DriverFactory.ts @@ -2,4 +2,4 @@ import type { BaseDriver } from '@cubejs-backend/base-driver'; export type DriverFactory = () => (Promise<BaseDriver> | BaseDriver); export type DriverFactoryByDataSource = - (dataSource: string) => (Promise<BaseDriver> | BaseDriver); + (dataSource: string, preAggregations?: boolean) => (Promise<BaseDriver> | BaseDriver); diff --git a/packages/cubejs-query-orchestrator/src/orchestrator/PreAggregations.ts b/packages/cubejs-query-orchestrator/src/orchestrator/PreAggregations.ts index 71eea71b64cf1..7d00e5fe1e91b 100644 --- a/packages/cubejs-query-orchestrator/src/orchestrator/PreAggregations.ts +++ b/packages/cubejs-query-orchestrator/src/orchestrator/PreAggregations.ts @@ -438,7 +438,7 @@ export class PreAggregations { ): Promise<[boolean, string]> { // fetching tables const loadCache = new PreAggregationLoadCache( - () => this.driverFactory(dataSource), + () => this.driverFactory(dataSource, true), this.queryCache, this, { @@ -509,7 +509,7 @@ export class PreAggregations { if (!loadCacheByDataSource[`${dataSource}_${preAggregationSchema}`]) { loadCacheByDataSource[`${dataSource}_${preAggregationSchema}`] = new PreAggregationLoadCache( - () => this.driverFactory(dataSource), + () => this.driverFactory(dataSource, true), this.queryCache, this, { @@ -532,7 +532,7 @@ export class PreAggregations { const preAggregationsTablesToTempTablesPromise = preAggregations.map((p: PreAggregationDescription, i) =>
(preAggregationsTablesToTempTables) => { const loader = new PreAggregationPartitionRangeLoader( - () => this.driverFactory(p.dataSource || 'default'), + () => this.driverFactory(p.dataSource || 'default', true), this.logger, this.queryCache, this, @@ -625,7 +625,7 @@ export class PreAggregations { if (!loadCacheByDataSource[`${dataSource}_${preAggregationSchema}`]) { loadCacheByDataSource[`${dataSource}_${preAggregationSchema}`] = new PreAggregationLoadCache( - () => this.driverFactory(dataSource), + () => this.driverFactory(dataSource, true), this.queryCache, this, { @@ -640,7 +640,7 @@ export class PreAggregations { const expandedPreAggregations: PreAggregationDescription[][] = await Promise.all(preAggregations.map(p => { const loader = new PreAggregationPartitionRangeLoader( - () => this.driverFactory(p.dataSource || 'default'), + () => this.driverFactory(p.dataSource || 'default', true), this.logger, this.queryCache, this, @@ -677,20 +677,20 @@ export class PreAggregations { if (!this.queue[dataSource]) { this.queue[dataSource] = QueryCache.createQueue( `SQL_PRE_AGGREGATIONS_${this.redisPrefix}_${dataSource}`, - () => this.driverFactory(dataSource), + () => this.driverFactory(dataSource, true), (client, q) => { const { preAggregation, preAggregationsTablesToTempTables, newVersionEntry, requestId, invalidationKeys, buildRangeEnd } = q; const loader = new PreAggregationLoader( - () => this.driverFactory(dataSource), + () => this.driverFactory(dataSource, true), this.logger, this.queryCache, this, preAggregation, preAggregationsTablesToTempTables, new PreAggregationLoadCache( - () => this.driverFactory(dataSource), + () => this.driverFactory(dataSource, true), this.queryCache, this, { @@ -736,7 +736,7 @@ export class PreAggregations { requestId } = q; const loadCache = new PreAggregationLoadCache( - () => this.driverFactory(dataSource), + () => this.driverFactory(dataSource, true), this.queryCache, this, { @@ -796,7 +796,7 @@ export class PreAggregations { if 
(!loadCacheByDataSource[`${dataSource}_${preAggregationSchema}`]) { loadCacheByDataSource[`${dataSource}_${preAggregationSchema}`] = new PreAggregationLoadCache( - () => this.driverFactory(dataSource), + () => this.driverFactory(dataSource, true), this.queryCache, this, { diff --git a/packages/cubejs-questdb-driver/src/QuestDriver.ts b/packages/cubejs-questdb-driver/src/QuestDriver.ts index 015f028757b61..3117b2fbf256c 100644 --- a/packages/cubejs-questdb-driver/src/QuestDriver.ts +++ b/packages/cubejs-questdb-driver/src/QuestDriver.ts @@ -64,6 +64,11 @@ export class QuestDriver[db] pool. */ @@ -83,19 +88,20 @@ export class QuestDriver { diff --git a/packages/cubejs-redshift-driver/src/RedshiftDriver.ts b/packages/cubejs-redshift-driver/src/RedshiftDriver.ts index 46aee81aa3973..af16317b63fa4 100644 --- a/packages/cubejs-redshift-driver/src/RedshiftDriver.ts +++ b/packages/cubejs-redshift-driver/src/RedshiftDriver.ts @@ -74,6 +74,11 @@ export class RedshiftDriver extends PostgresDriver */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. 
*/ @@ -89,19 +94,20 @@ export class RedshiftDriver extends PostgresDriver const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; - const clusterIdentifier = getEnv('redshiftClusterIdentifier', { dataSource }); - const dbPass = getEnv('dbPass', { dataSource }); - const dbUser = getEnv('dbUser', { dataSource }); - const dbName = getEnv('dbName', { dataSource }); + const clusterIdentifier = getEnv('redshiftClusterIdentifier', { dataSource, preAggregations }); + const dbPass = getEnv('dbPass', { dataSource, preAggregations }); + const dbUser = getEnv('dbUser', { dataSource, preAggregations }); + const dbName = getEnv('dbName', { dataSource, preAggregations }); let credentialsProvider: RedshiftCredentialsProvider; if (clusterIdentifier && !dbPass && !config.password) { credentialsProvider = new RedshiftIAMCredentialsProvider({ - region: getEnv('redshiftAwsRegion', { dataSource }), - assumeRoleArn: getEnv('redshiftAssumeRoleArn', { dataSource }), - assumeRoleExternalId: getEnv('redshiftAssumeRoleExternalId', { dataSource }), + region: getEnv('redshiftAwsRegion', { dataSource, preAggregations }), + assumeRoleArn: getEnv('redshiftAssumeRoleArn', { dataSource, preAggregations }), + assumeRoleExternalId: getEnv('redshiftAssumeRoleExternalId', { dataSource, preAggregations }), clusterIdentifier, dbName, }); @@ -269,11 +275,12 @@ export class RedshiftDriver extends PostgresDriver */ protected getInitialConfiguration( dataSource: string, + preAggregations?: boolean, ): Partial<RedshiftDriverConfiguration> { return { // @todo It's not possible to support UNLOAD in readOnly mode, because we need column types (CREATE TABLE?)
readOnly: false, - exportBucket: this.getExportBucket(dataSource), + exportBucket: this.getExportBucket(dataSource, preAggregations), }; } @@ -330,6 +337,7 @@ export class RedshiftDriver extends PostgresDriver protected getExportBucket( dataSource: string, + preAggregations?: boolean, ): RedshiftDriverExportAWS | undefined { const supportedBucketTypes = ['s3']; @@ -337,16 +345,17 @@ export class RedshiftDriver extends PostgresDriver const requiredExportBucket: Partial<RedshiftDriverExportAWS> = { bucketType: getEnv('dbExportBucketType', { supported: supportedBucketTypes, dataSource, + preAggregations, }), - bucketName: getEnv('dbExportBucket', { dataSource }), - region: getEnv('dbExportBucketAwsRegion', { dataSource }), + bucketName: getEnv('dbExportBucket', { dataSource, preAggregations }), + region: getEnv('dbExportBucketAwsRegion', { dataSource, preAggregations }), }; const exportBucket: Partial<RedshiftDriverExportAWS> = { ...requiredExportBucket, - keyId: getEnv('dbExportBucketAwsKey', { dataSource }), - secretKey: getEnv('dbExportBucketAwsSecret', { dataSource }), - unloadArn: getEnv('redshiftUnloadArn', { dataSource }), + keyId: getEnv('dbExportBucketAwsKey', { dataSource, preAggregations }), + secretKey: getEnv('dbExportBucketAwsSecret', { dataSource, preAggregations }), + unloadArn: getEnv('redshiftUnloadArn', { dataSource, preAggregations }), }; if (exportBucket.bucketType) { diff --git a/packages/cubejs-server-core/src/core/OptsHandler.ts b/packages/cubejs-server-core/src/core/OptsHandler.ts index 1865f40faa306..cc49c00c203d1 100644 --- a/packages/cubejs-server-core/src/core/OptsHandler.ts +++ b/packages/cubejs-server-core/src/core/OptsHandler.ts @@ -203,6 +203,7 @@ export class OptsHandler { private defaultDriverFactory(ctx: DriverContext): DriverConfig { const type = getEnv('dbType', { dataSource: assertDataSource(ctx.dataSource), + preAggregations: ctx.preAggregations, }); return { type }; } diff --git a/packages/cubejs-server-core/src/core/server.ts b/packages/cubejs-server-core/src/core/server.ts index 15975c2dfb935..3573d596def44 100644
--- a/packages/cubejs-server-core/src/core/server.ts +++ b/packages/cubejs-server-core/src/core/server.ts @@ -17,6 +17,7 @@ import { getEnv, assertDataSource, getRealType, + hasPreAggregationsEnvVars, internalExceptions, track, FileRepository, @@ -599,20 +600,33 @@ export class CubejsServerCore { /** * Driver factory function `DriverFactoryByDataSource`. */ - async (dataSource = 'default') => { - if (driverPromise[dataSource]) { - return driverPromise[dataSource]; + async (dataSource = 'default', preAggregations = false) => { + // Only create a separate pre-agg driver when credentials are actually + // configured (custom driverFactory or PRE_AGGREGATIONS env vars). + const usePreAgg = preAggregations && ( + !!this.options.driverFactory || hasPreAggregationsEnvVars(dataSource) + ); + const factoryKey = usePreAgg ? `${dataSource}__pre_agg` : dataSource; + + if (driverPromise[factoryKey]) { + return driverPromise[factoryKey]; } // eslint-disable-next-line no-return-assign - return driverPromise[dataSource] = (async () => { + return driverPromise[factoryKey] = (async () => { let driver: BaseDriver | null = null; try { + this.logger('Initializing data source connection', { + dataSource, + preAggregations: usePreAgg || false, + }); + driver = await this.resolveDriver( { ...context, dataSource, + preAggregations: usePreAgg || false, }, orchestratorOptions, ); @@ -624,6 +638,11 @@ export class CubejsServerCore { await driver.testConnection(); + this.logger('Data source connection initialized', { + dataSource, + preAggregations: usePreAgg || undefined, + }); + return driver; } @@ -631,7 +650,7 @@ export class CubejsServerCore { `Unexpected return type, driverFactory must return driver (dataSource: "${dataSource}"), actual: ${getRealType(driver)}` ); } catch (e) { - driverPromise[dataSource] = null; + driverPromise[factoryKey] = null; if (driver) { await driver.release(); @@ -868,6 +887,7 @@ export class CubejsServerCore { testConnectionTimeout: 
options?.testConnectionTimeout, }; opts.dataSource = assertDataSource(context.dataSource); + opts.preAggregations = context.preAggregations || false; return CubejsServerCore.createDriver(type, opts); } } diff --git a/packages/cubejs-server-core/src/core/types.ts b/packages/cubejs-server-core/src/core/types.ts index 4cf172ad70e34..10d680de3b1cd 100644 --- a/packages/cubejs-server-core/src/core/types.ts +++ b/packages/cubejs-server-core/src/core/types.ts @@ -87,6 +87,7 @@ export interface RequestContext { export interface DriverContext extends RequestContext { dataSource: string; + preAggregations?: boolean; } export interface DbTypeInternalContext { @@ -154,6 +155,7 @@ export type ScheduledRefreshContextsFn = () => Promise; // internal export type DriverOptions = { dataSource?: string, + preAggregations?: boolean, maxPoolSize?: number, testConnectionTimeout?: number, }; diff --git a/packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts b/packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts index beb8e758d0069..117883d4c4790 100644 --- a/packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts +++ b/packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts @@ -232,6 +232,11 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface { */ dataSource?: string, + /** + * Whether this driver is used for pre-aggregations. + */ + preAggregations?: boolean, + /** * Max pool size value for the [cube]<-->[db] pool. */ @@ -251,14 +256,15 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; - let privateKey = getEnv('snowflakePrivateKey', { dataSource }); + let privateKey = getEnv('snowflakePrivateKey', { dataSource, preAggregations }); if (privateKey) { // If the private key is encrypted - we need to decrypt it before passing to // snowflake sdk. 
if (privateKey.includes('BEGIN ENCRYPTED PRIVATE KEY')) { - const keyPasswd = getEnv('snowflakePrivateKeyPass', { dataSource }); + const keyPasswd = getEnv('snowflakePrivateKeyPass', { dataSource, preAggregations }); if (!keyPasswd) { throw new Error( @@ -283,26 +289,26 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface { this.config = { readOnly: false, - host: getEnv('snowflakeHost', { dataSource }), - account: getEnv('snowflakeAccount', { dataSource }), - region: getEnv('snowflakeRegion', { dataSource }), - warehouse: getEnv('snowflakeWarehouse', { dataSource }), - role: getEnv('snowflakeRole', { dataSource }), - clientSessionKeepAlive: getEnv('snowflakeSessionKeepAlive', { dataSource }), - database: getEnv('dbName', { dataSource }), - username: getEnv('dbUser', { dataSource }), - password: getEnv('dbPass', { dataSource }), - authenticator: getEnv('snowflakeAuthenticator', { dataSource }), - oauthToken: getEnv('snowflakeOAuthToken', { dataSource }), - oauthTokenPath: getEnv('snowflakeOAuthTokenPath', { dataSource }), - privateKeyPath: getEnv('snowflakePrivateKeyPath', { dataSource }), - privateKeyPass: getEnv('snowflakePrivateKeyPass', { dataSource }), + host: getEnv('snowflakeHost', { dataSource, preAggregations }), + account: getEnv('snowflakeAccount', { dataSource, preAggregations }), + region: getEnv('snowflakeRegion', { dataSource, preAggregations }), + warehouse: getEnv('snowflakeWarehouse', { dataSource, preAggregations }), + role: getEnv('snowflakeRole', { dataSource, preAggregations }), + clientSessionKeepAlive: getEnv('snowflakeSessionKeepAlive', { dataSource, preAggregations }), + database: getEnv('dbName', { dataSource, preAggregations }), + username: getEnv('dbUser', { dataSource, preAggregations }), + password: getEnv('dbPass', { dataSource, preAggregations }), + authenticator: getEnv('snowflakeAuthenticator', { dataSource, preAggregations }), + oauthToken: getEnv('snowflakeOAuthToken', { dataSource, preAggregations }), + 
oauthTokenPath: getEnv('snowflakeOAuthTokenPath', { dataSource, preAggregations }), + privateKeyPath: getEnv('snowflakePrivateKeyPath', { dataSource, preAggregations }), + privateKeyPass: getEnv('snowflakePrivateKeyPass', { dataSource, preAggregations }), ...(privateKey ? { privateKey } : {}), - exportBucket: this.getExportBucket(dataSource), + exportBucket: this.getExportBucket(dataSource, preAggregations), resultPrefetch: 1, - executionTimeout: getEnv('dbQueryTimeout', { dataSource }), - identIgnoreCase: getEnv('snowflakeQuotedIdentIgnoreCase', { dataSource }), - exportBucketCsvEscapeSymbol: getEnv('dbExportBucketCsvEscapeSymbol', { dataSource }), + executionTimeout: getEnv('dbQueryTimeout', { dataSource, preAggregations }), + identIgnoreCase: getEnv('snowflakeQuotedIdentIgnoreCase', { dataSource, preAggregations }), + exportBucketCsvEscapeSymbol: getEnv('dbExportBucketCsvEscapeSymbol', { dataSource, preAggregations }), application: 'CubeDev_Cube', ...config }; @@ -328,18 +334,19 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface { protected createExportBucket( dataSource: string, bucketType: string, + preAggregations?: boolean, ): SnowflakeDriverExportBucket { if (bucketType === 's3') { // integrationName is optional for s3 - const integrationName = getEnv('dbExportIntegration', { dataSource }); + const integrationName = getEnv('dbExportIntegration', { dataSource, preAggregations }); // keyId and secretKey are optional for s3 if IAM role is used - const keyId = getEnv('dbExportBucketAwsKey', { dataSource }); - const secretKey = getEnv('dbExportBucketAwsSecret', { dataSource }); + const keyId = getEnv('dbExportBucketAwsKey', { dataSource, preAggregations }); + const secretKey = getEnv('dbExportBucketAwsSecret', { dataSource, preAggregations }); return { bucketType, - bucketName: getEnv('dbExportBucket', { dataSource }), - region: getEnv('dbExportBucketAwsRegion', { dataSource }), + bucketName: getEnv('dbExportBucket', { dataSource, 
preAggregations }), + region: getEnv('dbExportBucketAwsRegion', { dataSource, preAggregations }), ...(integrationName !== undefined && { integrationName }), ...(keyId !== undefined && { keyId }), ...(secretKey !== undefined && { secretKey }), @@ -350,17 +357,17 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface { // integrationName is required for gcs as the only possible way in snowflake return { bucketType, - bucketName: getEnv('dbExportBucket', { dataSource }), - integrationName: getEnv('dbExportIntegration', { dataSource }), - credentials: getEnv('dbExportGCSCredentials', { dataSource }), + bucketName: getEnv('dbExportBucket', { dataSource, preAggregations }), + integrationName: getEnv('dbExportIntegration', { dataSource, preAggregations }), + credentials: getEnv('dbExportGCSCredentials', { dataSource, preAggregations }), }; } if (bucketType === 'azure') { // integrationName is optional for azure - const integrationName = getEnv('dbExportIntegration', { dataSource }); + const integrationName = getEnv('dbExportIntegration', { dataSource, preAggregations }); // sasToken is optional for azure if storage integration is used - const sasToken = getEnv('dbExportAzureSasToken', { dataSource }); + const sasToken = getEnv('dbExportAzureSasToken', { dataSource, preAggregations }); if (!integrationName && !sasToken) { throw new Error( @@ -369,17 +376,17 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface { } // azureKey is optional if DefaultAzureCredential() is used - const azureKey = getEnv('dbExportBucketAzureKey', { dataSource }); + const azureKey = getEnv('dbExportBucketAzureKey', { dataSource, preAggregations }); // These 3 options make sense in case you want to authorize to Azure from // application running in the k8s environment. 
- const clientId = getEnv('dbExportBucketAzureClientId', { dataSource }); - const tenantId = getEnv('dbExportBucketAzureTenantId', { dataSource }); - const tokenFilePath = getEnv('dbExportBucketAzureTokenFilePAth', { dataSource }); + const clientId = getEnv('dbExportBucketAzureClientId', { dataSource, preAggregations }); + const tenantId = getEnv('dbExportBucketAzureTenantId', { dataSource, preAggregations }); + const tokenFilePath = getEnv('dbExportBucketAzureTokenFilePAth', { dataSource, preAggregations }); return { bucketType, - bucketName: getEnv('dbExportBucket', { dataSource }), + bucketName: getEnv('dbExportBucket', { dataSource, preAggregations }), ...(integrationName !== undefined && { integrationName }), ...(sasToken !== undefined && { sasToken }), ...(azureKey !== undefined && { azureKey }), @@ -410,15 +417,18 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface { protected getExportBucket( dataSource: string, + preAggregations?: boolean, ): SnowflakeDriverExportBucket | undefined { const bucketType = getEnv('dbExportBucketType', { dataSource, + preAggregations, supported: SUPPORTED_BUCKET_TYPES, }); if (bucketType) { const exportBucket = this.createExportBucket( dataSource, bucketType, + preAggregations, ); const emptyKeys = Object.keys(exportBucket) diff --git a/packages/cubejs-sqlite-driver/driver/SqliteDriver.js b/packages/cubejs-sqlite-driver/driver/SqliteDriver.js index ec6b95cafcce7..ab2a6cebb279d 100644 --- a/packages/cubejs-sqlite-driver/driver/SqliteDriver.js +++ b/packages/cubejs-sqlite-driver/driver/SqliteDriver.js @@ -33,9 +33,10 @@ class SqliteDriver extends BaseDriver { const dataSource = config.dataSource || assertDataSource('default'); + const preAggregations = config.preAggregations || false; this.config = { - database: getEnv('dbName', { dataSource }), + database: getEnv('dbName', { dataSource, preAggregations }), ...config };