From 20d6ad21c2e405819c85fae0fea5104f420ed32d Mon Sep 17 00:00:00 2001 From: morgan-at-cube <153563892+morgan-at-cube@users.noreply.github.com> Date: Fri, 27 Sep 2024 17:55:38 -0700 Subject: [PATCH] Update databricks-jdbc.mdx Update export bucket instructions to use Unity Catalog. The previous recommendation (DBFS) is no longer supported by Databricks. --- .../configuration/data-sources/databricks-jdbc.mdx | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/pages/product/configuration/data-sources/databricks-jdbc.mdx b/docs/pages/product/configuration/data-sources/databricks-jdbc.mdx index 9bd4bd942a362..d10677842d2ec 100644 --- a/docs/pages/product/configuration/data-sources/databricks-jdbc.mdx +++ b/docs/pages/product/configuration/data-sources/databricks-jdbc.mdx @@ -59,7 +59,7 @@ docker run -it -p 4000:4000 --env-file=.env cube-jdk | `CUBEJS_DB_DATABRICKS_ACCEPT_POLICY` | Whether or not to accept the license terms for the Databricks JDBC driver | `true`, `false` | ✅ | | `CUBEJS_DB_DATABRICKS_TOKEN` | The [personal access token][databricks-docs-pat] used to authenticate the Databricks connection | A valid token | ✅ | | `CUBEJS_DB_DATABRICKS_CATALOG` | The name of the [Databricks catalog][databricks-catalog] to connect to | A valid catalog name | ❌ | -| `CUBEJS_DB_EXPORT_BUCKET_MOUNT_DIR` | The path for the [Databricks DBFS mount][databricks-docs-dbfs] | A valid mount path | ❌ | +| `CUBEJS_DB_EXPORT_BUCKET_MOUNT_DIR` | The path for the [Databricks DBFS mount][databricks-docs-dbfs] (Not needed if using a Unity Catalog connection) | A valid mount path | ❌ | | `CUBEJS_CONCURRENCY` | The number of concurrent connections each queue has to the database. Default is `2` | A valid number | ❌ | | `CUBEJS_DB_MAX_POOL` | The maximum number of concurrent database connections to pool. Default is `8` | A valid number | ❌ | @@ -103,7 +103,7 @@ Storage][azure-bs] for export bucket functionality. 
#### AWS S3 To use AWS S3 as an export bucket, first complete [the Databricks guide on -mounting S3 buckets to Databricks DBFS][databricks-docs-dbfs-s3]. +connecting to cloud object storage using Unity Catalog][databricks-docs-uc-s3]. @@ -123,7 +123,7 @@ CUBEJS_DB_EXPORT_BUCKET_AWS_REGION= #### Azure Blob Storage To use Azure Blob Storage as an export bucket, follow [the Databricks guide on -mounting Azure Blob Storage to Databricks DBFS][databricks-docs-dbfs-azure]. +connecting to Azure Data Lake Storage Gen2 and Blob Storage][databricks-docs-azure]. [Retrieve the storage account access key][azure-bs-docs-get-key] from your Azure account and use as follows: @@ -152,10 +152,10 @@ bucket][self-preaggs-export-bucket] **must be** configured. https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys [databricks]: https://databricks.com/ [databricks-docs-dbfs]: https://docs.databricks.com/en/dbfs/mounts.html -[databricks-docs-dbfs-azure]: - https://docs.databricks.com/data/data-sources/azure/azure-storage.html#mount-azure-blob-storage-containers-to-dbfs -[databricks-docs-dbfs-s3]: - https://docs.databricks.com/data/data-sources/aws/amazon-s3.html#access-s3-buckets-through-dbfs +[databricks-docs-azure]: + https://docs.databricks.com/data/data-sources/azure/azure-storage.html +[databricks-docs-uc-s3]: + https://docs.databricks.com/en/connect/unity-catalog/index.html [databricks-docs-jdbc-url]: https://docs.databricks.com/integrations/bi/jdbc-odbc-bi.html#get-server-hostname-port-http-path-and-jdbc-url [databricks-docs-pat]: