15 changes: 15 additions & 0 deletions .github/workflows/drivers-tests.yml
@@ -204,6 +204,10 @@ jobs:

tests:
runs-on: ubuntu-24.04
permissions:
id-token: write # Needed for OIDC+AWS
contents: read

timeout-minutes: 30
needs: [latest-tag-sha, build]
if: (needs['latest-tag-sha'].outputs.sha != github.sha)
@@ -225,6 +229,7 @@ jobs:
snowflake
snowflake-encrypted-pk
snowflake-export-bucket-s3
snowflake-export-bucket-s3-via-storage-integration-iam-roles
snowflake-export-bucket-s3-prefix
snowflake-export-bucket-azure
snowflake-export-bucket-azure-prefix
@@ -259,6 +264,7 @@ jobs:
- snowflake
- snowflake-encrypted-pk
- snowflake-export-bucket-s3
- snowflake-export-bucket-s3-via-storage-integration-iam-roles
- snowflake-export-bucket-s3-prefix
- snowflake-export-bucket-azure
- snowflake-export-bucket-azure-prefix
@@ -338,6 +344,15 @@ jobs:
gunzip image.tar.gz
docker load -i image.tar

- name: Configure AWS credentials via IRSA
uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: ${{ secrets.DRIVERS_TESTS_AWS_ROLE_ARN_FOR_SNOWFLAKE }}
aws-region: us-west-1
mask-aws-account-id: true
if: |
env.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY != '' && matrix.database == 'snowflake-export-bucket-s3-via-storage-integration-iam-roles'

- name: Run tests
uses: nick-fields/retry@v3
# It's enough to test for any one secret because they are set all at once or not set at all
17 changes: 8 additions & 9 deletions docs/pages/product/configuration/data-sources/snowflake.mdx
@@ -133,15 +133,13 @@ Storage][google-cloud-storage] for export bucket functionality.

<InfoBox>

Ensure the AWS credentials are correctly configured in IAM to allow reads and
writes to the export bucket in S3 if you are not using storage integration.
If you are using storage integration then you still need to configure access keys
for Cube Store to be able to read from the export bucket.
It's possible to authenticate with IAM roles instead of access keys for Cube Store.
Ensure proper IAM privileges are configured for reads and writes to the S3 export
bucket. Snowflake can authenticate with either a storage integration or user
credentials, and Cube Store can authenticate with either IAM roles/IRSA or user
credentials; mixed configurations are supported.

</InfoBox>

Using IAM user credentials:
Using IAM user credentials for both:

```dotenv
CUBEJS_DB_EXPORT_BUCKET_TYPE=s3
@@ -151,8 +149,8 @@ CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=<AWS_SECRET>
CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>
```

[Using Storage Integration][snowflake-docs-aws-integration] to write to Export Bucket and
then Access Keys to read from Cube Store:
Using a [Storage Integration][snowflake-docs-aws-integration] to write to the export bucket and
user credentials for Cube Store to read from it:

```dotenv
CUBEJS_DB_EXPORT_BUCKET_TYPE=s3
@@ -163,7 +161,8 @@ CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=<AWS_SECRET>
CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>
```

Using Storage Integration to write to export bocket and IAM role to read from Cube Store:
Using a Storage Integration to write to the export bucket and an IAM role/IRSA for Cube Store to read from it:

```dotenv
CUBEJS_DB_EXPORT_BUCKET_TYPE=s3
CUBEJS_DB_EXPORT_BUCKET=my.bucket.on.s3
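# The remaining lines of this example are truncated in this diff view. Based on
# the snowflake.json fixture added in this PR, the configuration most likely
# continues with the storage integration name and region, and deliberately omits
# the AWS key/secret so that Cube Store falls back to the default AWS credential
# chain (IAM role / IRSA):
CUBEJS_DB_EXPORT_INTEGRATION=<INTEGRATION_NAME>
CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>
```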
1 change: 1 addition & 0 deletions packages/cubejs-snowflake-driver/package.json
@@ -25,6 +25,7 @@
"lint:fix": "eslint --fix src/* --ext .ts"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.726.0",
"@cubejs-backend/base-driver": "1.3.77",
"@cubejs-backend/shared": "1.3.77",
"date-fns-timezone": "^0.1.4",
52 changes: 38 additions & 14 deletions packages/cubejs-snowflake-driver/src/SnowflakeDriver.ts
@@ -23,6 +23,7 @@ import {
import { formatToTimeZone } from 'date-fns-timezone';
import fs from 'fs/promises';
import crypto from 'crypto';
import { S3ClientConfig } from '@aws-sdk/client-s3';
import { HydrationMap, HydrationStream } from './HydrationStream';

const SUPPORTED_BUCKET_TYPES = ['s3', 'gcs', 'azure'];
@@ -106,8 +107,8 @@ const SnowflakeToGenericType: Record<string, GenericDataBaseType> = {
interface SnowflakeDriverExportAWS {
bucketType: 's3',
bucketName: string,
keyId: string,
secretKey: string,
keyId?: string,
secretKey?: string,
region: string,
integrationName?: string,
}
@@ -328,14 +329,17 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
if (bucketType === 's3') {
// integrationName is optional for s3
const integrationName = getEnv('dbExportIntegration', { dataSource });
// keyId and secretKey are optional for s3 if IAM role is used
const keyId = getEnv('dbExportBucketAwsKey', { dataSource });
const secretKey = getEnv('dbExportBucketAwsSecret', { dataSource });

return {
bucketType,
bucketName: getEnv('dbExportBucket', { dataSource }),
keyId: getEnv('dbExportBucketAwsKey', { dataSource }),
secretKey: getEnv('dbExportBucketAwsSecret', { dataSource }),
region: getEnv('dbExportBucketAwsRegion', { dataSource }),
...(integrationName !== undefined && { integrationName }),
...(keyId !== undefined && { keyId }),
...(secretKey !== undefined && { secretKey }),
};
}

@@ -387,6 +391,20 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
);
}

private getRequiredExportBucketKeys(
exportBucket: SnowflakeDriverExportBucket,
emptyKeys: string[]
): string[] {
if (exportBucket.bucketType === 's3') {
const s3Config = exportBucket as SnowflakeDriverExportAWS;
if (s3Config.integrationName) {
return emptyKeys.filter(key => key !== 'keyId' && key !== 'secretKey');
}
}

return emptyKeys;
}

protected getExportBucket(
dataSource: string,
): SnowflakeDriverExportBucket | undefined {
@@ -402,9 +420,11 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {

const emptyKeys = Object.keys(exportBucket)
.filter((key: string) => exportBucket[<keyof SnowflakeDriverExportBucket>key] === undefined);
if (emptyKeys.length) {
const keysToValidate = this.getRequiredExportBucketKeys(exportBucket, emptyKeys);

if (keysToValidate.length) {
throw new Error(
`Unsupported configuration exportBucket, some configuration keys are empty: ${emptyKeys.join(',')}`
`Unsupported configuration exportBucket, some configuration keys are empty: ${keysToValidate.join(',')}`
);
}

@@ -731,7 +751,7 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
// Storage integration export flow takes precedence over direct auth if it is defined
if (conf.integrationName) {
optionsToExport.STORAGE_INTEGRATION = conf.integrationName;
} else {
} else if (conf.keyId && conf.secretKey) {
optionsToExport.CREDENTIALS = `(AWS_KEY_ID = '${conf.keyId}' AWS_SECRET_KEY = '${conf.secretKey}')`;
}
} else if (bucketType === 'gcs') {
@@ -771,14 +791,18 @@ export class SnowflakeDriver extends BaseDriver implements DriverInterface {
const { bucketName, path } = this.parseBucketUrl(this.config.exportBucket!.bucketName);
const exportPrefix = path ? `${path}/${tableName}` : tableName;

const s3Config: S3ClientConfig = { region };
if (keyId && secretKey) {
// If access key and secret are provided, use them as credentials
// Otherwise, let the SDK use the default credential chain (IRSA, instance profile, etc.)
s3Config.credentials = {
accessKeyId: keyId,
secretAccessKey: secretKey,
};
}

return this.extractUnloadedFilesFromS3(
{
credentials: {
accessKeyId: keyId,
secretAccessKey: secretKey,
},
region,
},
s3Config,
bucketName,
exportPrefix,
);
15 changes: 15 additions & 0 deletions packages/cubejs-testing-drivers/fixtures/snowflake.json
@@ -22,6 +22,21 @@
}
}
},
"export-bucket-s3-via-storage-integration-iam-roles": {
"cube": {
"environment": {
"CUBEJS_DB_EXPORT_BUCKET_TYPE": "s3",
"CUBEJS_DB_EXPORT_BUCKET": "snowflake-drivers-tests-preaggs",
"CUBEJS_DB_EXPORT_BUCKET_AWS_REGION": "us-west-1",
"CUBEJS_DB_EXPORT_INTEGRATION": "DRIVERS_TESTS_PREAGGS_S3",
"AWS_REGION": "us-west-1",
"AWS_ACCESS_KEY_ID": "${AWS_ACCESS_KEY_ID}",
"AWS_SECRET_ACCESS_KEY": "${AWS_SECRET_ACCESS_KEY}",
"AWS_SESSION_TOKEN": "${AWS_SESSION_TOKEN}",
"AWS_DEFAULT_REGION": "${AWS_DEFAULT_REGION}"
}
}
},
"export-bucket-azure": {
"cube": {
"environment": {
3 changes: 2 additions & 1 deletion packages/cubejs-testing-drivers/package.json
@@ -49,6 +49,7 @@
"snowflake-full": "yarn test-driver -i dist/test/snowflake-full.test.js",
"snowflake-encrypted-pk-full": "yarn test-driver -i dist/test/snowflake-encrypted-pk-full.test.js",
"snowflake-export-bucket-s3-full": "yarn test-driver -i dist/test/snowflake-export-bucket-s3-full.test.js",
"snowflake-export-bucket-s3-via-storage-integration-iam-roles-full": "yarn test-driver -i dist/test/snowflake-export-bucket-s3-via-storage-integration-iam-roles-full.test.js",
"snowflake-export-bucket-s3-prefix-full": "yarn test-driver -i dist/test/snowflake-export-bucket-s3-prefix-full.test.js",
"snowflake-export-bucket-azure-full": "yarn test-driver -i dist/test/snowflake-export-bucket-azure-full.test.js",
"snowflake-export-bucket-azure-prefix-full": "yarn test-driver -i dist/test/snowflake-export-bucket-azure-prefix-full.test.js",
@@ -59,7 +60,7 @@
"redshift-core": "yarn test-driver -i dist/test/redshift-core.test.js",
"redshift-full": "yarn test-driver -i dist/test/redshift-full.test.js",
"redshift-export-bucket-s3-full": "yarn test-driver -i dist/test/redshift-export-bucket-s3-full.test.js",
"update-all-snapshots-local": "yarn run athena-export-bucket-s3-full --mode=local -u; yarn run bigquery-export-bucket-gcs-full --mode=local -u; yarn run clickhouse-full --mode=local -u; yarn run clickhouse-export-bucket-s3-full --mode=local -u; yarn run clickhouse-export-bucket-s3-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-azure-full --mode=local -u; yarn run databricks-jdbc-export-bucket-azure-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-gcs-full --mode=local -u; yarn run databricks-jdbc-export-bucket-gcs-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-s3-full --mode=local -u; yarn run databricks-jdbc-export-bucket-s3-prefix-full --mode=local -u; yarn run databricks-jdbc-full --mode=local -u; yarn run mssql-full --mode=local -u; yarn run mysql-full --mode=local -u; yarn run postgres-full --mode=local -u; yarn run redshift-export-bucket-s3-full --mode=local -u; yarn run redshift-full --mode=local -u; yarn run snowflake-encrypted-pk-full --mode=local -u; yarn run snowflake-export-bucket-azure-full --mode=local -u; yarn run snowflake-export-bucket-azure-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-via-storage-integration-full --mode=local -u; yarn run snowflake-export-bucket-gcs-full --mode=local -u; yarn run snowflake-export-bucket-gcs-prefix-full --mode=local -u; yarn run snowflake-export-bucket-s3-full --mode=local -u; yarn run snowflake-export-bucket-s3-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-full --mode=local -u; yarn run snowflake-full --mode=local -u",
"update-all-snapshots-local": "yarn run athena-export-bucket-s3-full --mode=local -u; yarn run bigquery-export-bucket-gcs-full --mode=local -u; yarn run clickhouse-full --mode=local -u; yarn run clickhouse-export-bucket-s3-full --mode=local -u; yarn run clickhouse-export-bucket-s3-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-azure-full --mode=local -u; yarn run databricks-jdbc-export-bucket-azure-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-gcs-full --mode=local -u; yarn run databricks-jdbc-export-bucket-gcs-prefix-full --mode=local -u; yarn run databricks-jdbc-export-bucket-s3-full --mode=local -u; yarn run databricks-jdbc-export-bucket-s3-prefix-full --mode=local -u; yarn run databricks-jdbc-full --mode=local -u; yarn run mssql-full --mode=local -u; yarn run mysql-full --mode=local -u; yarn run postgres-full --mode=local -u; yarn run redshift-export-bucket-s3-full --mode=local -u; yarn run redshift-full --mode=local -u; yarn run snowflake-encrypted-pk-full --mode=local -u; yarn run snowflake-export-bucket-azure-full --mode=local -u; yarn run snowflake-export-bucket-azure-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-via-storage-integration-full --mode=local -u; yarn run snowflake-export-bucket-gcs-full --mode=local -u; yarn run snowflake-export-bucket-gcs-prefix-full --mode=local -u; yarn run snowflake-export-bucket-s3-full --mode=local -u; yarn run snowflake-export-bucket-s3-via-storage-integration-iam-roles-full --mode=local -u; yarn run snowflake-export-bucket-s3-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-prefix-full --mode=local -u; yarn run snowflake-export-bucket-azure-full --mode=local -u; yarn run snowflake-full --mode=local -u",
"tst": "clear && yarn tsc && yarn bigquery-core"
},
"files": [