diff --git a/astro.config.mjs b/astro.config.mjs index 72db8327..3c7bcb1a 100644 --- a/astro.config.mjs +++ b/astro.config.mjs @@ -287,7 +287,7 @@ export default defineConfig({ collapsed: true, items: [ { - label: 'Welcome', + label: 'Introduction', slug: 'snowflake', }, { @@ -324,10 +324,6 @@ export default defineConfig({ label: 'SQL Functions', slug: 'snowflake/sql-functions', }, - { - label: 'Feature Coverage', - slug: 'snowflake/coverage-features', - }, { label: 'Changelog', slug: 'snowflake/changelog', diff --git a/public/images/snowflake/credit_df_cat.png b/public/images/snowflake/credit_df_cat.png new file mode 100644 index 00000000..8ec0ef1d Binary files /dev/null and b/public/images/snowflake/credit_df_cat.png differ diff --git a/public/images/snowflake/credit_df_hist.png b/public/images/snowflake/credit_df_hist.png new file mode 100644 index 00000000..c572b4ed Binary files /dev/null and b/public/images/snowflake/credit_df_hist.png differ diff --git a/public/images/snowflake/dbeaver-new-connection.png b/public/images/snowflake/dbeaver-new-connection.png new file mode 100644 index 00000000..d196371f Binary files /dev/null and b/public/images/snowflake/dbeaver-new-connection.png differ diff --git a/public/images/snowflake/proxy-web-ui.png b/public/images/snowflake/proxy-web-ui.png new file mode 100644 index 00000000..0d255b9e Binary files /dev/null and b/public/images/snowflake/proxy-web-ui.png differ diff --git a/public/images/snowflake/request-logs-web-ui.png b/public/images/snowflake/request-logs-web-ui.png new file mode 100644 index 00000000..ca4cc96f Binary files /dev/null and b/public/images/snowflake/request-logs-web-ui.png differ diff --git a/public/images/snowflake/run-sql-queries-web-ui.png b/public/images/snowflake/run-sql-queries-web-ui.png new file mode 100644 index 00000000..884975bb Binary files /dev/null and b/public/images/snowflake/run-sql-queries-web-ui.png differ diff --git a/src/content/docs/snowflake/capabilities/configuration.md 
b/src/content/docs/snowflake/capabilities/configuration.md index 4851501c..66a79b83 100644 --- a/src/content/docs/snowflake/capabilities/configuration.md +++ b/src/content/docs/snowflake/capabilities/configuration.md @@ -6,16 +6,13 @@ nav: label: --- - - - LocalStack exposes various configuration options to control its behaviour. These options can be passed to LocalStack as environment variables like so: -{{< command >}} -$ DEBUG=1 localstack start -{{< / command >}} +```bash +DEBUG=1 localstack start +``` ## Core @@ -39,7 +36,7 @@ These options are applicable when using the CLI to start LocalStack. | Variable | Example Values | Description | | - | - | - | | `LOCALSTACK_VOLUME_DIR` | `~/.cache/localstack/volume` (on Linux) | The location on the host of the LocalStack volume directory mount. | -| `CONFIG_PROFILE` | | The configuration profile to load. See [Profiles]({{< ref "#profiles" >}}) | +| `CONFIG_PROFILE` | | The configuration profile to load. See [Profiles](#profiles) | | `CONFIG_DIR` | `~/.localstack` | The path where LocalStack can find configuration profiles and other CLI-specific configuration | ## Docker @@ -72,42 +69,41 @@ A configuration profile is a set of environment variables stored in a `*.env` fi Here is an example of what configuration profiles might look like: -{{< command >}} +```bash $ tree ~/.localstack /home/username/.localstack ├── default.env ├── dev.env └── pro.env -{{< / command >}} +``` Here is an example of what a specific environment profile looks like -{{< command >}} +```bash $ cat ~/.localstack/pro-debug.env LOCALSTACK_AUTH_TOKEN=XXXXX SF_LOG=trace SF_S3_ENDPOINT=s3.localhost.localstack.cloud:4566 -{{< / command >}} +``` You can load a profile by either setting the environment variable `CONFIG_PROFILE=` or the `--profile=` CLI flag when using the CLI. 
Let's take an example to load the `dev.env` profile file if it exists: -{{< command >}} -$ IMAGE_NAME=localstack/snowflake localstack --profile=dev start -{{< / command >}} +```bash +IMAGE_NAME=localstack/snowflake localstack --profile=dev start +``` If no profile is specified, the `default.env` profile will be loaded. If explicitly specified, any environment variables will overwrite the configurations defined in the profile. To display the config environment variables, you can use the following command: -{{< command >}} -$ localstack --profile=dev config show -{{< / command >}} +```bash +localstack --profile=dev config show +``` -{{< alert title="Note" >}} +:::note The `CONFIG_PROFILE` is a CLI feature and cannot be used directly with a docker-compose setup. You can look at [alternative means of setting environment variables](https://docs.docker.com/compose/environment-variables/set-environment-variables/) for your Docker Compose setups. For Docker setups, we recommend passing the environment variables directly to the `docker run` command. -{{< /alert >}} - +::: diff --git a/src/content/docs/snowflake/capabilities/init-hooks.md b/src/content/docs/snowflake/capabilities/init-hooks.mdx similarity index 90% rename from src/content/docs/snowflake/capabilities/init-hooks.md rename to src/content/docs/snowflake/capabilities/init-hooks.mdx index 2d54717c..9292b8b0 100644 --- a/src/content/docs/snowflake/capabilities/init-hooks.md +++ b/src/content/docs/snowflake/capabilities/init-hooks.mdx @@ -3,8 +3,8 @@ title: Initialization Hooks description: Writing SQL scripts to initialize your Snowflake emulator --- +import { Tabs, TabItem } from '@astrojs/starlight/components'; - ## Introduction LocalStack for Snowflake supports automatically executing `*.sf.sql` files via [Init Hooks](https://docs.localstack.cloud/references/init-hooks/) when mounted into the Docker container. 
A script can be added to one of these stages in the lifecycle: @@ -28,8 +28,9 @@ SHOW DATABASES; Mount the script into `/etc/localstack/init/ready.d/` using Docker Compose or the `localstack` CLI: -{{< tabpane >}} -{{< tab header="docker-compose.yml" lang="yml" >}} + + +```yaml version: "3.8" services: @@ -47,13 +48,17 @@ services: - "/path/to/test.sf.sql:/etc/localstack/init/ready.d/test.sf.sql" # ready hook - "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack" - "/var/run/docker.sock:/var/run/docker.sock" -{{< /tab >}} -{{< tab header="CLI" lang="bash" >}} +``` + + +```bash # DOCKER_FLAGS are additional parameters to the `docker run` command of localstack start - -DOCKER_FLAGS='-v /path/to/test.sf.sql:/etc/localstack/init/ready.d/test.sf.sql' DEBUG=1 localstack start -{{< /tab >}} -{{< /tabpane >}} +DOCKER_FLAGS='-v /path/to/test.sf.sql:/etc/localstack/init/ready.d/test.sf.sql' \ +DEBUG=1 \ +localstack start +``` + + Start the Snowflake emulator, and the following logs will appear: diff --git a/src/content/docs/snowflake/capabilities/state-management.md b/src/content/docs/snowflake/capabilities/state-management.mdx similarity index 86% rename from src/content/docs/snowflake/capabilities/state-management.md rename to src/content/docs/snowflake/capabilities/state-management.mdx index b4d58ea4..6b222b18 100644 --- a/src/content/docs/snowflake/capabilities/state-management.md +++ b/src/content/docs/snowflake/capabilities/state-management.mdx @@ -3,7 +3,7 @@ title: State Management description: Get started with State Management in LocalStack for Snowflake --- - +import { Tabs, TabItem } from '@astrojs/starlight/components'; ## Introduction @@ -22,11 +22,17 @@ LocalStack’s Persistence mechanism enables the saving and restoration of the e To start snapshot-based persistence, launch LocalStack with the configuration option `PERSISTENCE=1`. 
This setting instructs LocalStack to save all local Snowflake resources and their respective application states into the LocalStack Volume Directory. Upon restarting LocalStack, you'll be able to resume your activities exactly where you left off. -{{< tabpane >}} -{{< tab header="LocalStack CLI" lang="bash" >}} -LOCALSTACK_AUTH_TOKEN=... PERSISTENCE=1 IMAGE_NAME=localstack/snowflake localstack start -{{< /tab >}} -{{< tab header="Docker Compose" lang="yaml" >}} + + +```bash +LOCALSTACK_AUTH_TOKEN=... \ +PERSISTENCE=1 \ +IMAGE_NAME=localstack/snowflake \ +localstack start +``` + + +```yaml ... image: localstack/snowflake environment: @@ -34,22 +40,25 @@ LOCALSTACK_AUTH_TOKEN=... PERSISTENCE=1 IMAGE_NAME=localstack/snowflake localsta - PERSISTENCE=1 volumes: - "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack" -{{< /tab >}} -{{< tab header="Docker" lang="bash" >}} +``` + + +```bash docker run \ -e LOCALSTACK_AUTH_TOKEN=${LOCALSTACK_AUTH_TOKEN:?} \ -e PERSISTENCE=1 \ -v ./volume:/var/lib/localstack \ -p 4566:4566 \ localstack/snowflake -{{< /tab >}} -{{< /tabpane >}} +``` + + -{{< alert title="Note">}} +:::note Snapshots may not be compatible across different versions of LocalStack. It is possible that snapshots from older versions can be restored, but there are no guarantees as to whether LocalStack will start into a consistent state. We are actively working on a solution for this problem. -{{< /alert >}} +::: ## Export/Import State @@ -59,9 +68,9 @@ The Export/Import State feature enables you to export the state of your LocalSta To export the state, you can run the following command: -{{< command >}} -$ localstack state export '' -{{< /command >}} +```bash +localstack state export '' +``` You can use the `` argument to specify a file path to export the state to. If you do not specify a file path, the state will be exported to the current working directory into a file named `ls-state-export`. 
@@ -69,8 +78,8 @@ You can use the `` argument to specify a file path to export the stat To import the state, you can run the following command: -{{< command >}} -$ localstack state import '' -{{< /command >}} +```bash +localstack state import '' +``` The `` argument is required and specifies the file path to import the state from. The file should be generated from a previous export. diff --git a/src/content/docs/snowflake/features/accounts.md b/src/content/docs/snowflake/features/accounts.md index dfd588a7..cb780846 100644 --- a/src/content/docs/snowflake/features/accounts.md +++ b/src/content/docs/snowflake/features/accounts.md @@ -47,7 +47,7 @@ You can also specify the account for Snowflake drivers that let you connect with Example establishing a JDBC connection: -``` +```text jdbc:snowflake://snowflake.localhost.localstack.cloud:4566/?account=your_account ``` @@ -67,4 +67,4 @@ The query statement will return the name of the account you are currently connec |------------------------------------------| | YOUR_ACCOUNT | +------------------------------------------+ -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/authentication.md b/src/content/docs/snowflake/features/authentication.md index 35622ee0..89c32ab6 100644 --- a/src/content/docs/snowflake/features/authentication.md +++ b/src/content/docs/snowflake/features/authentication.md @@ -32,9 +32,9 @@ sf_conn_obj = sf.connect( The default username and password are set to `test` and can be changed using `SF_DEFAULT_USER` and `SF_DEFAULT_PASSWORD` when starting the Snowflake emulator. -{{< alert title="Note" >}} +:::note It is not recommended to use your production credentials in the Snowflake emulator. -{{< /alert >}} +::: ## RSA key pair authentication @@ -66,6 +66,6 @@ conn = snowflake.connector.connect( ) ``` -{{< alert title="Note" >}} +:::note The Snowflake emulator does not validate key contents—RSA authentication is mocked for local testing only. 
-{{< /alert >}} +::: diff --git a/src/content/docs/snowflake/features/clones.md b/src/content/docs/snowflake/features/clones.md index 33c58cb8..f2215566 100644 --- a/src/content/docs/snowflake/features/clones.md +++ b/src/content/docs/snowflake/features/clones.md @@ -3,8 +3,6 @@ title: Clones description: Get started with Clones in LocalStack for Snowflake --- - - ## Introduction Cloning in Snowflake allows you to create a quick, zero-copy duplicate of an existing database, schema, or table. This feature enables users to replicate data structures and content for testing or development without duplicating the underlying storage. @@ -110,4 +108,4 @@ The expected output is: | 1 | test | +----+------+ 1 Row(s) produced. Time Elapsed: 0.012s -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/cross-database-resource-sharing.md b/src/content/docs/snowflake/features/cross-database-resource-sharing.md index e69149ca..c9bed215 100644 --- a/src/content/docs/snowflake/features/cross-database-resource-sharing.md +++ b/src/content/docs/snowflake/features/cross-database-resource-sharing.md @@ -3,8 +3,6 @@ title: Cross-Database Resource Sharing description: Get started with cross-database resource sharing in the Snowflake emulator --- - - ## Introduction Snowflake data providers can easily share data from various databases using secure views. These views can include schemas, tables, and other views from one or more databases, as long as they're part of the same account. 
@@ -97,4 +95,4 @@ The expected output is: ```plaintext (1, 2, 3) -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/dynamic-tables.md b/src/content/docs/snowflake/features/dynamic-tables.md index 8fb30883..74180951 100644 --- a/src/content/docs/snowflake/features/dynamic-tables.md +++ b/src/content/docs/snowflake/features/dynamic-tables.md @@ -3,16 +3,11 @@ title: Dynamic Tables description: Get started with Dynamic Tables in LocalStack for Snowflake --- - - ## Introduction Snowflake Dynamic Tables enable a background process to continuously load new data from sources into the table, supporting both delta and full load operations. A dynamic table automatically updates to reflect query results, removing the need for a separate target table and custom code for data transformation. This table is kept current through regularly scheduled refreshes by an automated process. -The Snowflake emulator supports Dynamic tables, allowing you to create and manage Dynamic tables locally. The following operations are supported: - -* [`CREATE DYNAMIC TABLE`](https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table) -* [`DROP DYNAMIC TABLE`](https://docs.snowflake.com/en/sql-reference/sql/drop-dynamic-table) +The Snowflake emulator supports Dynamic tables, allowing you to create and manage Dynamic tables locally. 
## Getting started @@ -91,4 +86,4 @@ The output should be: | 1 | foo | | 2 | bar | +----+------+ -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/hybrid-tables.md b/src/content/docs/snowflake/features/hybrid-tables.md index e94dae1c..d3faa8cd 100644 --- a/src/content/docs/snowflake/features/hybrid-tables.md +++ b/src/content/docs/snowflake/features/hybrid-tables.md @@ -3,17 +3,11 @@ title: Hybrid Tables description: Get started with Hybrid Tables in LocalStack for Snowflake --- - - ## Introduction Snowflake Hybrid tables, also known as Unistore hybrid tables, support fast, single-row operations by enforcing unique constraints for required primary keys and including indexes to speed up data retrieval. These tables are designed to optimize support for both analytical and transactional workloads simultaneously, underpinning Snowflake's Unistore architecture. -The Snowflake emulator supports Hybrid tables, allowing you to create and manage Hybrid tables locally. The following operations are supported: - -* [`CREATE HYBRID TABLE`](https://docs.snowflake.com/en/sql-reference/sql/create-hybrid-table) -* [`DROP HYBRID TABLE`](https://docs.snowflake.com/en/sql-reference/sql/drop-hybrid-table) -* [`SHOW HYBRID TABLES`](https://docs.snowflake.com/en/sql-reference/sql/show-hybrid-tables) +The Snowflake emulator supports Hybrid tables, allowing you to create and manage Hybrid tables locally. ## Getting started @@ -73,4 +67,4 @@ The output should be: | -----------------------------------------+ | TEST-TABLE successfully dropped. 
| +------------------------------------------+ -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/iceberg-tables.md b/src/content/docs/snowflake/features/iceberg-tables.md index c5aaaaf7..9cf5914c 100644 --- a/src/content/docs/snowflake/features/iceberg-tables.md +++ b/src/content/docs/snowflake/features/iceberg-tables.md @@ -3,8 +3,6 @@ title: Iceberg Tables description: This is a dummy description. --- - - ## Introduction Iceberg tables uses [Apache Iceberg](https://iceberg.apache.org/) open table format specification to provide an abstraction layer on data files stored in open formats. Iceberg tables for Snowflake offer schema evolution, partitioning, and snapshot isolation to manage the table data efficiently. @@ -81,6 +79,6 @@ The output should be: You can also list the content of the S3 bucket: -{{< command >}} -$ awslocal s3 ls --recursive s3://test-bucket/ -{{< / command >}} \ No newline at end of file +```bash +awslocal s3 ls --recursive s3://test-bucket/ +``` diff --git a/src/content/docs/snowflake/coverage-features.md b/src/content/docs/snowflake/features/index.md similarity index 100% rename from src/content/docs/snowflake/coverage-features.md rename to src/content/docs/snowflake/features/index.md diff --git a/src/content/docs/snowflake/features/materialized-views.md b/src/content/docs/snowflake/features/materialized-views.md index c81071f5..8e49ad2b 100644 --- a/src/content/docs/snowflake/features/materialized-views.md +++ b/src/content/docs/snowflake/features/materialized-views.md @@ -7,14 +7,7 @@ description: Get started with Materialized Views in LocalStack for Snowflake Materialized views are a feature of Snowflake that allows you to create a persistent view of a table. This view is pre-computed and stored in the database, allowing for faster queries and improved performance. 
-The Snowflake emulator supports Materialized Views, allowing you to accurately test materialized view logic and behavior in local development environments. The following operations are supported: - -- [`CREATE MATERIALIZED VIEW`](https://docs.snowflake.com/en/sql-reference/sql/create-materialized-view) -- [`ALTER MATERIALIZED VIEW`](https://docs.snowflake.com/en/sql-reference/sql/alter-materialized-view) -- [`DESCRIBE MATERIALIZED VIEW`](https://docs.snowflake.com/en/sql-reference/sql/desc-materialized-view) -- [`DROP MATERIALIZED VIEW`](https://docs.snowflake.com/en/sql-reference/sql/drop-materialized-view) -- [`SHOW MATERIALIZED VIEWS`](https://docs.snowflake.com/en/sql-reference/sql/show-materialized-views) -- [`TRUNCATE MATERIALIZED VIEW`](https://docs.snowflake.com/en/sql-reference/sql/truncate-materialized-view) +The Snowflake emulator supports Materialized Views, allowing you to accurately test materialized view logic and behavior in local development environments. ## Getting started diff --git a/src/content/docs/snowflake/features/native-apps.md b/src/content/docs/snowflake/features/native-apps.md index eaa6674e..0cee478d 100644 --- a/src/content/docs/snowflake/features/native-apps.md +++ b/src/content/docs/snowflake/features/native-apps.md @@ -7,13 +7,7 @@ description: Get started with Native Apps in LocalStack for Snowflake Snowflake Native Apps are applications built and executed directly within the Snowflake Data Cloud platform. These apps can be used to extend the capabilities of Snowflake by integrating with external services, automating workflows, and building custom data applications. These apps are developed using Snowflake-native tools (e.g., Snowflake SQL, Snowflake API, and JavaScript) and can be distributed on the Snowflake Marketplace. -The Snowflake emulator supports creating & deploying Native Apps locally with the same statements as the Snowflake service. 
The following operations are supported: - -- [`CREATE APPLICATIONS`](https://docs.snowflake.com/en/sql-reference/sql/create-application.html) -- [`SHOW APPLICATION PACKAGES`](https://docs.snowflake.com/en/sql-reference/sql/show-application-packages.html) -- [`ALTER APPLICATION PACKAGE`](https://docs.snowflake.com/en/sql-reference/sql/alter-application-package.html) -- [`DESCRIBE APPLICATION`]( https://docs.snowflake.com/en/sql-reference/sql/desc-application) -- [`DROP APPLICATION PACKAGE`](https://docs.snowflake.com/en/sql-reference/sql/drop-application-package.html) +The Snowflake emulator supports creating & deploying Native Apps locally with the same statements as the Snowflake service. ## Getting started @@ -66,14 +60,14 @@ You can access the Native App by visiting your preferred browser and navigating https://snowflake.localhost.localstack.cloud:4566/apps/test/test/TASKS_STREAMS_APP_username/ ``` -{{< alert title="Note" >}} +:::note The URL above is an example. Change the outputted URL by: 1. Replacing `https://app.snowflake.com` with `https://snowflake.localhost.localstack.cloud:4566`. 2. Changing the path structure from `/#/apps/application/` to `/apps/test/test/`. You can make additional changes depending on your local setup. -{{< /alert >}} +::: The following app should be displayed: diff --git a/src/content/docs/snowflake/features/snowpipe.md b/src/content/docs/snowflake/features/snowpipe.md index 9aa90f2b..b5bc193e 100644 --- a/src/content/docs/snowflake/features/snowpipe.md +++ b/src/content/docs/snowflake/features/snowpipe.md @@ -7,12 +7,7 @@ description: Get started with Snowpipe in LocalStack for Snowflake Snowpipe allows you to load data into Snowflake tables from files stored in an external stage. Snowpipe continuously loads data from files in a stage into a table as soon as the files are available. Snowpipe uses a queue to manage the data loading process, which allows you to load data into Snowflake tables in near real-time. 
-The Snowflake emulator supports Snowpipe, allowing you to create and manage Snowpipe objects in the emulator. You can use Snowpipe to load data into Snowflake tables from files stored in a local directory or a local/remote S3 bucket. The following operations are supported: - -* `CREATE PIPE` -* `DESCRIBE PIPE` -* `DROP PIPE` -* `SHOW PIPES` +The Snowflake emulator supports Snowpipe, allowing you to create and manage Snowpipe objects in the emulator. You can use Snowpipe to load data into Snowflake tables from files stored in a local directory or a local/remote S3 bucket. ## Getting started @@ -24,9 +19,9 @@ In this guide, you will create a stage, and a pipe to load data from a local S3 You can create a local S3 bucket using the `mb` command with the `awslocal` CLI. -{{< command >}} -$ awslocal s3 mb s3://test-bucket -{{< / command >}} +```bash +awslocal s3 mb s3://test-bucket +``` ### Create a stage @@ -73,11 +68,11 @@ Retrieve the `notification_channel` value from the output of the `DESC PIPE` que You can use the [`PutBucketNotificationConfiguration`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutBucketNotificationConfiguration.html) API to create a bucket notification configuration that sends notifications to Snowflake when new files are uploaded to the S3 bucket. 
-{{< command >}} -$ awslocal s3api put-bucket-notification-configuration \ +```bash +awslocal s3api put-bucket-notification-configuration \ --bucket test-bucket \ --notification-configuration file://notification.json -{{< / command >}} +``` The `notification.json` file should contain the following configuration: @@ -107,9 +102,9 @@ Copy a JSON file to the S3 bucket to trigger the pipe to load the data into the Upload the file to the S3 bucket: -{{< command >}} -$ awslocal s3 cp test.json s3://test-bucket/ -{{< / command >}} +```bash +awslocal s3 cp test.json s3://test-bucket/ +``` ### Check the data @@ -117,4 +112,4 @@ After uploading the file to the S3 bucket in the previous step, the contents of ```sql SELECT * FROM my_test_table -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/stages.md b/src/content/docs/snowflake/features/stages.mdx similarity index 88% rename from src/content/docs/snowflake/features/stages.md rename to src/content/docs/snowflake/features/stages.mdx index 40757afc..603dd7f6 100644 --- a/src/content/docs/snowflake/features/stages.md +++ b/src/content/docs/snowflake/features/stages.mdx @@ -2,19 +2,13 @@ title: Stages description: Get started with Stages in LocalStack for Snowflake --- - - +import { Tabs, TabItem } from '@astrojs/starlight/components'; ## Introduction Stages are a way to load data into Snowflake. You can use stages to load data from files in a variety of formats, including CSV, JSON, and Parquet. You can also use stages to load data from external cloud storage services, such as Amazon S3, Google Cloud Storage, and Microsoft Azure Blob Storage. -The Snowflake emulator supports stages, allowing you to load data into Snowflake using the same commands and syntax as the Snowflake service. 
The following operations are supported: - -- [`CREATE STAGE`](https://docs.snowflake.com/en/sql-reference/sql/create-stage.html) -- [`DESCRIBE STAGE`](https://docs.snowflake.com/en/sql-reference/sql/desc-stage) -- [`DROP STAGE`](https://docs.snowflake.com/en/sql-reference/sql/drop-stage.html) -- [`SHOW STAGES`](https://docs.snowflake.com/en/sql-reference/sql/show-stages) +The Snowflake emulator supports stages, allowing you to load data into Snowflake using the same commands and syntax as the Snowflake service. ## Getting started @@ -60,14 +54,22 @@ FILE_FORMAT = csv; In this example, you can upload the CSV files to the table stage provided for `employees` table. -{{< tabpane >}} -{{< tab header="Linux/macOS" lang="sql" >}} + + + + ```sql PUT file://./employees0*.csv @@employees_stage AUTO_COMPRESS=TRUE; -{{< /tab >}} -{{< tab header="Windows" lang="sql" >}} +``` + + + ```sql PUT file://C:\temp\employees0*.csv @@employees_stage AUTO_COMPRESS=TRUE; -{{< /tab >}} -{{< /tabpane >}} +``` + + + + + The expected output is: @@ -87,10 +89,10 @@ The expected output is: You can also load data from an S3 bucket using the `CREATE STAGE` command. Create a new S3 bucket named `testbucket` and upload the [employees CSV files](./getting-started.zip) to the bucket. You can use LocalStack's `awslocal` CLI to create the S3 bucket and upload the files. 
-{{< command >}} +```bash awslocal s3 mb s3://testbucket awslocal s3 cp employees0*.csv s3://testbucket -{{< /command >}} +``` In this example, you can create a stage called `my_s3_stage` to load data from an S3 bucket: @@ -107,4 +109,4 @@ You can further copy data from the S3 stage to the table using the `COPY INTO` c COPY INTO mytable FROM @my_s3_stage PATTERN='.*employees.*.csv'; -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/storage-integrations.md b/src/content/docs/snowflake/features/storage-integrations.md index c3db2666..90cf1130 100644 --- a/src/content/docs/snowflake/features/storage-integrations.md +++ b/src/content/docs/snowflake/features/storage-integrations.md @@ -3,17 +3,11 @@ title: Storage Integrations description: Get started with Storage Integrations in LocalStack for Snowflake --- - - ## Introduction Snowflake storage integrations enable access to external cloud storage like Amazon S3, Google Cloud Storage, and Azure Blob Storage. They manage authentication through generated IAM roles, enhancing security and simplifying data operations without exposing sensitive credentials. This approach centralizes and controls access, streamlining workflows across major cloud platforms. -The Snowflake emulator supports storage integrations, allowing you to test interactions with external storage using the same commands and syntax as the Snowflake service. The following operations are supported: - -- [`CREATE STORAGE INTEGRATION`](https://docs.snowflake.com/en/sql-reference/sql/create-storage-integration) -- [`DESCRIBE STORAGE INTEGRATION`](https://docs.snowflake.com/en/sql-reference/sql/describe-storage-integration) -- [`DROP STORAGE INTEGRATION`](https://docs.snowflake.com/en/sql-reference/sql/drop-storage-integration) +The Snowflake emulator supports storage integrations, allowing you to test interactions with external storage using the same commands and syntax as the Snowflake service. 
## Getting started @@ -26,7 +20,7 @@ In this guide, you will create a Snowflake Storage Integration with Amazon S3 an You can create a local S3 bucket using the `mb` command with the `awslocal` CLI. ```bash -$ awslocal s3 mb s3://testbucket +awslocal s3 mb s3://testbucket ``` Upload some sample CSV file into the S3 bucket using the following command: @@ -114,4 +108,4 @@ To list the files in the stage, you can run the following statement: LIST @stage_example; ``` -The output will show the `files.csv` file that we uploaded earlier to the S3 bucket. \ No newline at end of file +The output will show the `files.csv` file that we uploaded earlier to the S3 bucket. diff --git a/src/content/docs/snowflake/features/streamlit.md b/src/content/docs/snowflake/features/streamlit.md index 1a2c1ac8..5723f880 100644 --- a/src/content/docs/snowflake/features/streamlit.md +++ b/src/content/docs/snowflake/features/streamlit.md @@ -3,19 +3,11 @@ title: Streamlit description: Get started with Streamlit in LocalStack for Snowflake --- - - ## Introduction Snowflake provides SQL commands to create and modify a `STREAMLIT` object. Streamlit is a Python library that allows you to create web applications with simple Python scripts. With Streamlit, you can create interactive web applications without having to learn complex web development technologies. -The Snowflake emulator supports Streamlit, allowing you to create Streamlit applications using the same commands and syntax as the Snowflake service. The following operations are supported: - -- `CREATE STREAMLIT` -- `SHOW STREAMLITS` -- `DESCRIBE STREAMLIT` -- `ALTER STREAMLIT` -- `DROP STREAMLIT` +The Snowflake emulator supports Streamlit, allowing you to create Streamlit applications using the same commands and syntax as the Snowflake service. 
## Getting started @@ -98,4 +90,4 @@ host = "snowflake.localhost.localstack.cloud" Currently, the Snowflake emulator supports CRUD operations to create Streamlit application entries in the Snowflake emulator, but support for hosting the Web UIs of these Streamlit apps is not yet available. -Users can run Streamlit apps locally by using the `streamlit run main.py` command and connecting to the local Snowflake instance. \ No newline at end of file +Users can run Streamlit apps locally by using the `streamlit run main.py` command and connecting to the local Snowflake instance. diff --git a/src/content/docs/snowflake/features/streams.md b/src/content/docs/snowflake/features/streams.md index 74606397..f99d4dea 100644 --- a/src/content/docs/snowflake/features/streams.md +++ b/src/content/docs/snowflake/features/streams.md @@ -3,8 +3,6 @@ title: Streams description: Get started with Streams in LocalStack for Snowflake --- - - ## Introduction Streams allow you to track changes made to a table. Streams capture changes made to a table, such as inserts, updates, and deletes, and store the changes in a log that you can query to see what changes have been made. diff --git a/src/content/docs/snowflake/features/tags.md b/src/content/docs/snowflake/features/tags.md index b2a5eb7f..c12fbb3c 100644 --- a/src/content/docs/snowflake/features/tags.md +++ b/src/content/docs/snowflake/features/tags.md @@ -3,18 +3,11 @@ title: Tags description: Get started with Tags in LocalStack for Snowflake --- - - ## Introduction Snowflake tags allow you to categorize and manage Snowflake objects by associating custom metadata with them. These tags support governance, cost tracking, and data lineage by enabling organizations to label resources with business-relevant information. -The Snowflake emulator supports tags, allowing you to apply these tags to the local Snowflake tables, views, and databases using the same commands and syntax as the Snowflake service. 
The following operations are supported: - -- [`CREATE TAG`](https://docs.snowflake.com/en/sql-reference/sql/create-tag) -- [`SHOW TAGS`](https://docs.snowflake.com/en/sql-reference/sql/show-tags) -- [`ALTER TAG`](https://docs.snowflake.com/en/sql-reference/sql/alter-tag) -- [`DROP TAG`](https://docs.snowflake.com/en/sql-reference/sql/drop-tag) +The Snowflake emulator supports tags, allowing you to apply these tags to the local Snowflake tables, views, and databases using the same commands and syntax as the Snowflake service. ## Getting started @@ -116,4 +109,4 @@ The expected output is: | TAG_TEST_DB | PUBLIC | TAG1 | test 123 | DATABASE | NULL | NULL | TAG_TEST_DB | DATABASE | NULL | +--------------+------------+----------+-----------+----------+-----------------+---------------+-------------+----------+-------------+ 1 Row(s) produced. Time Elapsed: 0.528s -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/tasks.md b/src/content/docs/snowflake/features/tasks.md index 7241148e..28c4e4f5 100644 --- a/src/content/docs/snowflake/features/tasks.md +++ b/src/content/docs/snowflake/features/tasks.md @@ -3,18 +3,11 @@ title: Tasks description: Get started with Tasks in LocalStack for Snowflake --- - - ## Introduction Tasks are user-defined objects that enable the automation of repetitive SQL operations in Snowflake. You can use tasks to schedule SQL statements, such as queries, DDL, and DML operations, to run at a specific time or at regular intervals. -The Snowflake emulator provides a CRUD (Create, Read, Update, Delete) interface to manage tasks. 
The following operations are supported: - -- [`CREATE TASK`](https://docs.snowflake.com/en/sql-reference/sql/create-task) -- [`DESCRIBE TASK`](https://docs.snowflake.com/en/sql-reference/sql/desc-task) -- [`DROP TASK`](https://docs.snowflake.com/en/sql-reference/sql/drop-task) -- [`SHOW TASKS`](https://docs.snowflake.com/en/sql-reference/sql/show-tasks) +The Snowflake emulator provides a CRUD (Create, Read, Update, Delete) interface to manage tasks. ## Getting started @@ -60,4 +53,4 @@ The expected output is: ```plaintext 123 -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/features/transaction-management.md b/src/content/docs/snowflake/features/transaction-management.md index 4f3e7255..1da6017e 100644 --- a/src/content/docs/snowflake/features/transaction-management.md +++ b/src/content/docs/snowflake/features/transaction-management.md @@ -7,13 +7,7 @@ description: Get started with Transaction Management in LocalStack for Snowflake Transaction Management is a feature that allows you to manage transactions in Snowflake. You can use Transaction Management to create a transaction management system that is specific to your application. -The Snowflake emulator supports Transaction Management, allowing you to emulate realistic database operations that require precise control over when changes are committed or rolled back. The following operations are supported: - -- [BEGIN](https://docs.snowflake.com/en/sql-reference/sql/begin) -- [COMMIT](https://docs.snowflake.com/en/sql-reference/sql/commit) -- [ROLLBACK](https://docs.snowflake.com/en/sql-reference/sql/rollback) -- [CURRENT_TRANSACTION()](https://docs.snowflake.com/en/sql-reference/functions/current_transaction) -- [SHOW TRANSACTIONS](https://docs.snowflake.com/en/sql-reference/sql/show-transactions) +The Snowflake emulator supports Transaction Management, allowing you to emulate realistic database operations that require precise control over when changes are committed or rolled back. 
## Getting started diff --git a/src/content/docs/snowflake/getting-started/faq.md b/src/content/docs/snowflake/getting-started/faq.md index 78cecd3c..42e4e323 100644 --- a/src/content/docs/snowflake/getting-started/faq.md +++ b/src/content/docs/snowflake/getting-started/faq.md @@ -2,12 +2,11 @@ title: FAQ description: Frequently asked questions about LocalStack for Snowflake template: doc +sidebar: + order: 3 --- - - - -## FAQs +## Core FAQs ### Are Snowflake v2 APIs supported? @@ -35,4 +34,4 @@ A CI key allows you to use LocalStack in your CI environment. Every activation o ### How can I get help with the Snowflake emulator? -The Snowflake emulator is currently in **preview**. To get help, you can join the [Slack community](https://localstack.cloud/slack) and share your feedback, questions, and suggestions with the LocalStack team on the `#help` channel. If your team is using LocalStack for Snowflake, you can also request support by [contacting us](https://localstack.cloud/contact). We would be happy to setup a private Slack channel for your team to provide dedicated support. \ No newline at end of file +The Snowflake emulator is currently in **preview**. To get help, you can join the [Slack community](https://localstack.cloud/slack) and share your feedback, questions, and suggestions with the LocalStack team on the `#help` channel. If your team is using LocalStack for Snowflake, you can also request support by [contacting us](https://localstack.cloud/contact). We would be happy to set up a private Slack channel for your team to provide dedicated support. diff --git a/src/content/docs/snowflake/getting-started/index.md b/src/content/docs/snowflake/getting-started/index.md index 147d1a82..7a971cf5 100644 --- a/src/content/docs/snowflake/getting-started/index.md +++ b/src/content/docs/snowflake/getting-started/index.md @@ -2,19 +2,19 @@ title: Installation description: Basic installation guide to get started with LocalStack for Snowflake.
template: doc +sidebar: + order: 0 --- - - ## Introduction You can use the Snowflake Docker image to run the Snowflake emulator. The Snowflake Docker image is available on the [LocalStack Docker Hub](https://hub.docker.com/r/localstack/snowflake). To pull the Snowflake Docker image, execute the following command: -{{< command >}} -$ docker pull localstack/snowflake -{{< / command >}} +```bash +docker pull localstack/snowflake +``` You can start the Snowflake Docker container using the following methods: @@ -22,32 +22,32 @@ You can start the Snowflake Docker container using the following methods: 2. [`docker` CLI](https://docs.docker.com/get-docker/) 2. [Docker Compose](https://docs.docker.com/compose/install/) -{{}} +:::note Before starting, ensure you have a valid `LOCALSTACK_AUTH_TOKEN` to access the Snowflake emulator. Refer to the [Auth Token guide](https://docs.localstack.cloud/getting-started/auth-token/) to obtain your Auth Token and specify it in the `LOCALSTACK_AUTH_TOKEN` environment variable. 
-{{}} +::: ### `localstack` CLI To start the Snowflake Docker container using the `localstack` CLI, execute the following command: -{{< command >}} -$ export LOCALSTACK_AUTH_TOKEN= -$ IMAGE_NAME=localstack/snowflake localstack start -{{< / command >}} +```bash +export LOCALSTACK_AUTH_TOKEN= +IMAGE_NAME=localstack/snowflake localstack start +``` ### `docker` CLI To start the Snowflake Docker container using the `docker` CLI, execute the following command: -{{< command >}} -$ docker run \ +```bash +docker run \ --rm -it \ -p 127.0.0.1:4566:4566 \ -p 127.0.0.1:4510-4559:4510-4559 \ -p 127.0.0.1:443:443 \ -e LOCALSTACK_AUTH_TOKEN=${LOCALSTACK_AUTH_TOKEN:?} \ localstack/snowflake -{{< / command >}} +``` ### Docker Compose @@ -72,9 +72,9 @@ services: Start the Snowflake Docker container with the following command: -{{< command >}} -$ docker-compose up -{{< / command >}} +```bash +docker-compose up +``` ## Updating @@ -86,10 +86,15 @@ To update the Snowflake Docker container, pull the latest image and restart the You can check if the Snowflake emulator is running by executing the following command: -{{< command >}} -$ curl -d '{}' snowflake.localhost.localstack.cloud:4566/session +```bash +curl -d '{}' snowflake.localhost.localstack.cloud:4566/session +``` + +The response should be: + +```bash {"success": true} -{{< / command >}} +``` ### How to enable detailed debug logs? @@ -98,19 +103,19 @@ You can set the `SF_LOG=trace` environment variable in the Snowflake container t When using `docker-compose` then simply add this variable to the `environment` section of the YAML configuration file. 
If you're starting up via the `localstack start` CLI, then make sure to start up via the following configuration: -{{< command >}} -$ DOCKER_FLAGS='-e SF_LOG=trace' DEBUG=1 IMAGE_NAME=localstack/snowflake localstack start -{{< / command >}} +```bash +DOCKER_FLAGS='-e SF_LOG=trace' DEBUG=1 IMAGE_NAME=localstack/snowflake localstack start +``` ### The `snowflake.localhost.localstack.cloud` hostname doesn't resolve on my machine, what can I do? On some systems, including some newer versions of MacOS, the domain name `snowflake.localhost.localstack.cloud` may not resolve properly. If you are encountering network issues and your Snowflake client drivers are unable to connect to the emulator, you may need to manually add the following entry to your `/etc/hosts` file: -{{< command >}} +```bash 127.0.0.1 snowflake.localhost.localstack.cloud -{{< / command >}} +``` ## Next steps -Now that the Snowflake emulator is installed, you can use it for developing and testing your Snowflake data pipelines. Refer to our [Quickstart](/aws/getting-started/quickstart) guide to get started. \ No newline at end of file +Now that the Snowflake emulator is installed, you can use it for developing and testing your Snowflake data pipelines. Refer to our [Quickstart](/snowflake/getting-started/quickstart) guide to get started. diff --git a/src/content/docs/snowflake/getting-started/quickstart.md b/src/content/docs/snowflake/getting-started/quickstart.md index 294fad8e..98d1012d 100644 --- a/src/content/docs/snowflake/getting-started/quickstart.md +++ b/src/content/docs/snowflake/getting-started/quickstart.md @@ -2,10 +2,10 @@ title: Quickstart description: Get started with LocalStack for Snowflake in a few simple steps template: doc +sidebar: + order: 2 --- - - ## Introduction This guide explains how to set up the Snowflake emulator and develop a Python program using the Snowflake Connector for Python (`snowflake-connector-python`) to interact with emulated Snowflake running on your local machine.
@@ -13,7 +13,7 @@ This guide explains how to set up the Snowflake emulator and develop a Python pr ## Prerequisites - [`localstack` CLI](https://docs.localstack.cloud/getting-started/installation/#localstack-cli) -- [LocalStack for Snowflake]({{< ref "installation" >}}) +- [LocalStack for Snowflake](/snowflake/getting-started/) - Python 3.10 or later - [`snowflake-connector-python` library](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-install) @@ -21,19 +21,22 @@ This guide explains how to set up the Snowflake emulator and develop a Python pr Before you begin, pull the Snowflake emulator image (`localstack/snowflake`) and start the container: -{{< command >}} -$ export LOCALSTACK_AUTH_TOKEN= -$ IMAGE_NAME=localstack/snowflake:latest localstack start -{{< / command >}} +```bash +export LOCALSTACK_AUTH_TOKEN= +IMAGE_NAME=localstack/snowflake:latest localstack start +``` Check the emulator's availability by running: -{{< command >}} -$ curl -d '{}' snowflake.localhost.localstack.cloud:4566/session - +```bash +curl -d '{}' snowflake.localhost.localstack.cloud:4566/session +``` + +The response should be: + +```bash {"success": true} - -{{< / command >}} +``` ### Connect to the Snowflake emulator @@ -90,9 +93,9 @@ This program creates a table named `ability`, inserts rows, and fetches the resu Execute the Python program with: -{{< command >}} -$ python main.py -{{< / command >}} +```bash +python main.py +``` The output should be: @@ -122,9 +125,9 @@ Verify the results by navigating to the LocalStack logs: To stop LocalStack and remove locally created resources, use: -{{< command >}} -$ localstack stop -{{< / command >}} +```bash +localstack stop +``` LocalStack is ephemeral and doesn't persist data across restarts. It runs inside a Docker container, and once it’s stopped, all locally created resources are automatically removed. 
In a future release of the Snowflake emulator, we will provide proper persistence and integration with our [Cloud Pods](https://docs.localstack.cloud/user-guide/state-management/cloud-pods/) feature as well. @@ -132,6 +135,6 @@ LocalStack is ephemeral and doesn't persist data across restarts. It runs inside You can now explore the following resources to learn more about the Snowflake emulator: -- [User Guide]({{< ref "user-guide" >}}): Learn about the Snowflake emulator's features and how to use them. -- [Tutorials](/aws/tutorials): Explore tutorials to use the Snowflake emulator for local development and testing. -- [References]({{< ref "references" >}}): Find information about the Snowflake emulator's configuration, changelog, and function coverage. \ No newline at end of file +- [Features](/snowflake/features/): Learn about the Snowflake emulator's features and how to use them. +- [SQL Functions](/snowflake/sql-functions): Learn about the SQL functions supported by the Snowflake emulator. +- [Changelog](/snowflake/changelog): Find information about the Snowflake emulator's changelog. diff --git a/src/content/docs/snowflake/index.md b/src/content/docs/snowflake/index.md index ee68d87d..d1104b29 100644 --- a/src/content/docs/snowflake/index.md +++ b/src/content/docs/snowflake/index.md @@ -1,28 +1,38 @@ --- -title: Welcome to LocalStack for Snowflake Docs -description: Get started with LocalStack Docs. +title: Introduction +description: LocalStack for Snowflake allows you to develop and test your Snowflake data pipelines entirely on your local machine! template: doc nav: 1 -label: Welcome +label: Introduction --- -# Welcome to LocalStack Docs! - -## What would you like to do today? - - [LocalStack](https://localstack.cloud/) is a cloud service emulator that runs in a single container on your laptop or in your CI environment. 
LocalStack for Snowflake emulates the functionality of a real Snowflake instance, allowing you to perform operations without an internet connection or a Snowflake account. This is valuable for locally developing and testing Snowflake data pipelines without incurring costs. The Snowflake emulator supports the following features: -* [**Basic operations** on warehouses, databases, schemas, and tables](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-example) -* [**Storing files** in user/data/named **stages**](https://docs.snowflake.com/en/user-guide/data-load-local-file-system-create-stage) -* [**Snowpark** libraries](https://docs.snowflake.com/en/developer-guide/snowpark/python/index) -* [**Snowpipe** streaming with **Kafka connector**](https://docs.snowflake.com/en/user-guide/data-load-snowpipe-streaming-kafka) -* [**JavaScript and Python UDFs**](https://docs.snowflake.com/en/developer-guide/udf/javascript/udf-javascript-introduction) -* ... and more! +- [**DDL/DML/DQL operations**](https://docs.snowflake.com/en/sql-reference/sql-dml) on warehouses, databases, schemas, and tables +- [**Basic operations**](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-example) for warehouse, database, schema, and table management +- [**Transaction management**](https://docs.snowflake.com/en/sql-reference/transactions) with full ACID compliance support +- [**Cross-database resource sharing**](https://docs.snowflake.com/en/user-guide/data-sharing-intro) for collaborative data access +- [**Storing files in Snowflake stages**](https://docs.snowflake.com/en/user-guide/data-load-local-file-system-create-stage) (user, data, and named stages) +- [**Storage integrations**](https://docs.snowflake.com/en/user-guide/data-load-s3-config-storage-integration) for external cloud storage connectivity +- [**Zero-copy cloning**](https://docs.snowflake.com/en/user-guide/object-clone) for efficient data duplication +- [**Materialized 
views**](https://docs.snowflake.com/en/user-guide/views-materialized) for pre-computed query results +- [**Iceberg tables**](https://docs.snowflake.com/en/user-guide/tables-iceberg) for open table format support +- [**Hybrid tables**](https://docs.snowflake.com/en/user-guide/tables-hybrid) combining row and column storage +- [**Dynamic tables**](https://docs.snowflake.com/en/user-guide/dynamic-tables-about) for automatically refreshed derived data +- [**Snowpark libraries**](https://docs.snowflake.com/en/developer-guide/snowpark/python/index) for DataFrame-based data processing +- [**User-defined functions (UDFs)**](https://docs.snowflake.com/en/developer-guide/udf/javascript/udf-javascript-introduction) in JavaScript, Python, and Java +- [**Snowpipe**](https://docs.snowflake.com/en/user-guide/data-load-snowpipe-intro) for continuous data ingestion +- [**Table streams**](https://docs.snowflake.com/en/user-guide/streams-intro) for change data capture (CDC) and audit logs +- [**Tasks**](https://docs.snowflake.com/en/user-guide/tasks-intro) for scheduled execution and workflow automation +- [**Row Access Policies**](https://docs.snowflake.com/en/user-guide/security-row-intro) for fine-grained data access control +- [**Running Streamlit apps locally**](https://docs.snowflake.com/en/developer-guide/streamlit/about-streamlit) for interactive data apps with [**Native Apps**](https://docs.snowflake.com/en/developer-guide/native-apps/native-apps-about) support +- [**Infrastructure-as-code with Terraform & Pulumi**](https://docs.snowflake.com/en/user-guide/ecosystem-terraform) for automated resource provisioning +- [**Polaris Catalog**](https://docs.snowflake.com/en/user-guide/polaris-getting-started) integration for open catalog management Integrating the Snowflake emulator into your existing CI/CD pipeline allows you to run integration tests and identify issues early, reducing surprises during production deployment. 
-Check our [SQL Functions Coverage]({{< ref "coverage-functions" >}}) and [Feature Coverage]({{< ref "coverage-features" >}}) pages for a comprehensive list of supported features. + +Check our [SQL Functions Coverage](/snowflake/sql-functions) page for a comprehensive list of supported features. diff --git a/src/content/docs/snowflake/integrations/airflow.md b/src/content/docs/snowflake/integrations/airflow.md index 0c2d00f8..f15cdd0f 100644 --- a/src/content/docs/snowflake/integrations/airflow.md +++ b/src/content/docs/snowflake/integrations/airflow.md @@ -3,8 +3,6 @@ title: Airflow description: Use Airflow to run local ETL jobs against the Snowflake emulator --- - - ## Introduction Apache [Airflow](https://airflow.apache.org) is a platform for running data-centric workflows and scheduled compute jobs. @@ -17,19 +15,20 @@ On this page we outline how to set up the connection between local Airflow and t In order to create an Airflow environment in local MWAA, we can use the [`awslocal`](https://github.com/localstack/awscli-local) command: -{{< command>}} -$ awslocal s3 mb s3://my-mwaa-bucket -$ awslocal mwaa create-environment --dag-s3-path /dags \ +```bash +awslocal s3 mb s3://my-mwaa-bucket +awslocal mwaa create-environment --dag-s3-path /dags \ --execution-role-arn arn:aws:iam::000000000000:role/airflow-role \ --network-configuration {} \ --source-bucket-arn arn:aws:s3:::my-mwaa-bucket \ --airflow-version 2.6.3 \ --name my-mwaa-env -{{< /command >}} +``` ## Create an Airflow DAG script that connects to LocalStack Snowflake We can then create a local file `my_dag.py` with the Airflow DAG definition, for example: + ```python import datetime import json @@ -85,6 +84,7 @@ In order to use the `SnowflakeOperator` in your Airflow DAG, a small patch is re The code listings below contain the patch for different Airflow versions - simply copy the relevant snippet and paste it into the top of your DAG script (e.g.,
`my_dag.py`). **Airflow version 2.6.3 and above**: + ```python # --- # patch for local Snowflake connection, for Airflow 2.6.3 and above @@ -108,6 +108,7 @@ SnowflakeHook._get_conn_params = _get_conn_params ``` **Airflow version 2.9.2 and above**: + ```python # --- # patch for local Snowflake connection, for Airflow 2.9.2 / 2.10.1 @@ -131,15 +132,16 @@ SnowflakeHook._get_conn_params = _get_conn_params # ... rest of your DAG script below ... ``` -{{< alert type="info" title="Note" >}} +:::note In a future release, we're looking to integrate these patches directly into the LocalStack environment, such that users do not need to apply these patches in DAG scripts manually. -{{< /alert >}} +::: ## Deploying the DAG to Airflow Next, we copy the `my_dag.py` file to the `/dags` folder within the `my-mwaa-bucket` S3 bucket, to trigger the deployment of the DAG in Airflow: -{{< command>}} -$ awslocal s3 cp my_dag.py s3://my-mwaa-bucket/dags/ -{{< /command >}} -You should then be able to open the Airflow UI (e.g., http://localhost.localstack.cloud:4510/dags) to view the status of the DAG and trigger a DAG run. \ No newline at end of file +```bash +awslocal s3 cp my_dag.py s3://my-mwaa-bucket/dags/ +``` + +You should then be able to open the Airflow UI (e.g., http://localhost.localstack.cloud:4510/dags) to view the status of the DAG and trigger a DAG run. 
diff --git a/src/content/docs/snowflake/integrations/continuous-integration.md b/src/content/docs/snowflake/integrations/continuous-integration.mdx similarity index 93% rename from src/content/docs/snowflake/integrations/continuous-integration.md rename to src/content/docs/snowflake/integrations/continuous-integration.mdx index a4df6365..405c1e69 100644 --- a/src/content/docs/snowflake/integrations/continuous-integration.md +++ b/src/content/docs/snowflake/integrations/continuous-integration.mdx @@ -3,7 +3,7 @@ title: Continuous Integration description: Get started with Snowflake emulator in continuous integration (CI) environments. --- - +import { Tabs, TabItem } from '@astrojs/starlight/components'; ## Introduction @@ -26,8 +26,9 @@ To create a CI key, follow these steps: The following examples demonstrate how to set up the emulator in GitHub Actions, CircleCI, and GitLab CI. -{{< tabpane >}} -{{< tab header="GitHub Actions" lang="yaml" >}} + + +```yaml name: LocalStack Test on: [ push, pull_request ] @@ -48,8 +49,10 @@ jobs: echo "Startup complete" env: LOCALSTACK_API_KEY: ${{ secrets.LOCALSTACK_API_KEY }} -{{< /tab >}} -{{< tab header="CircleCI" lang="yaml" >}} +``` + + +```yaml version: 2.1 orbs: @@ -79,8 +82,10 @@ workflows: build: jobs: - example-job -{{< /tab >}} -{{< tab header="GitLab CI" lang="yaml" >}} +``` + + +```yaml image: docker:20.10.16 stages: @@ -108,5 +113,6 @@ test: - echo "${dind_ip} localhost.localstack.cloud " >> /etc/hosts - DOCKER_HOST="tcp://${dind_ip}:2375" IMAGE_NAME=localstack/snowflake localstack start -d - localstack wait -t 15 -{{< /tab >}} -{{< /tabpane >}} \ No newline at end of file +``` + + diff --git a/src/content/docs/snowflake/integrations/dbeaver.md b/src/content/docs/snowflake/integrations/dbeaver.md index 89336ad5..8ca9b135 100644 --- a/src/content/docs/snowflake/integrations/dbeaver.md +++ b/src/content/docs/snowflake/integrations/dbeaver.md @@ -3,8 +3,6 @@ title: DBeaver description: Use DBeaver to interact with the 
Snowflake emulator --- - - ## Introduction [DBeaver](https://dbeaver.io/) is a free and open-source universal database tool for developers, database administrators, and analysts. DBeaver provides a wide range of features, such as executing SQL statements, viewing and editing data, managing database objects, and more. @@ -28,10 +26,8 @@ To create a new connection in DBeaver, follow these steps: - **Host**: `snowflake.localhost.localstack.cloud` - **User**: `test` - **Password**: `test` - New connection in DBeaver -

- + ![New connection in DBeaver](/images/snowflake/dbeaver-new-connection.png) - Click **Test Connection**. - If the connection test succeeds, click **Finish**. The Snowflake database will appear in DBeaver's Database Navigator. -You can verify the connection by running a query to check the Snowflake version: `SELECT CURRENT_VERSION();` \ No newline at end of file +You can verify the connection by running a query to check the Snowflake version: `SELECT CURRENT_VERSION();` diff --git a/src/content/docs/snowflake/integrations/dbt.md b/src/content/docs/snowflake/integrations/dbt.md index 03044b8e..7b814b05 100644 --- a/src/content/docs/snowflake/integrations/dbt.md +++ b/src/content/docs/snowflake/integrations/dbt.md @@ -3,8 +3,6 @@ title: dbt description: Use dbt to interact with the Snowflake emulator --- - - ## Introduction [dbt (data build tool)](https://www.getdbt.com/) is a transformation workflow tool that enables data analysts and engineers to transform data in their warehouses by writing modular SQL. dbt handles version control, documentation, and modularity for data transformations. @@ -19,9 +17,9 @@ In this guide, you will learn how to configure dbt to interact with the Snowflak First, install dbt with the Snowflake adapter: -{{< command >}} -$ pip install dbt-snowflake -{{< /command >}} +```bash +pip install dbt-snowflake +``` ### Configure dbt Profile @@ -49,9 +47,9 @@ localstack_snowflake: To verify your dbt configuration is working correctly with the Snowflake emulator, run: -{{< command >}} -$ dbt debug --profile localstack_snowflake -{{< /command >}} +```bash +dbt debug --profile localstack_snowflake +``` You should see output indicating a successful connection to the Snowflake emulator. 
@@ -96,14 +94,15 @@ models: - not_null ``` -{{< command >}} +You can run all models and tests with the following commands: + +```bash # Run all models -$ dbt run --profile localstack_snowflake +dbt run --profile localstack_snowflake # Run tests -$ dbt test --profile localstack_snowflake - -{{< /command >}} +dbt test --profile localstack_snowflake +``` ### Project Structure @@ -149,6 +148,6 @@ models: 3. **Documentation**: Document your models using dbt's built-in documentation features 4. **Modularity**: Break down complex transformations into smaller, reusable models -{{< alert type="info" >}} +:::note It's a good practice to always test your dbt models locally with the Snowflake emulator before deploying to production, to save time and resources. -{{< /alert >}} +::: diff --git a/src/content/docs/snowflake/integrations/flyway.md b/src/content/docs/snowflake/integrations/flyway.md index 67fb0000..6cea382b 100644 --- a/src/content/docs/snowflake/integrations/flyway.md +++ b/src/content/docs/snowflake/integrations/flyway.md @@ -3,8 +3,6 @@ title: Flyway description: Use Flyway to interact with the Snowflake emulator --- - - ## Introduction [Flyway](https://flywaydb.org/) is an open-source database migration tool that simplifies the process of managing and applying database migrations. Flyway supports various databases, including Snowflake, allowing you to manage database schema changes, version control, and data migration in a structured and automated way. @@ -37,4 +35,4 @@ To connect Flyway to the Snowflake emulator, follow these steps: * Enter JDBC URL as `jdbc:snowflake://http://snowflake.localhost.localstack.cloud:4566/?db=test&schema=PUBLIC&JDBC_QUERY_RESULT_FORMAT=JSON`. * Click on **Test connection**. -If the connection test succeeds, you can start applying database migrations using Flyway. \ No newline at end of file +If the connection test succeeds, you can start applying database migrations using Flyway. 
diff --git a/src/content/docs/snowflake/integrations/pulumi.md b/src/content/docs/snowflake/integrations/pulumi.md index 8ef2228d..91b20968 100644 --- a/src/content/docs/snowflake/integrations/pulumi.md +++ b/src/content/docs/snowflake/integrations/pulumi.md @@ -3,8 +3,6 @@ title: Pulumi description: Use Pulumi to interact with the Snowflake emulator --- - - ## Introduction [Pulumi](https://pulumi.com/) is an Infrastructure-as-Code (IaC) framework that allows you to define and provision infrastructure using familiar programming languages. Pulumi supports a wide range of cloud providers and services, including AWS, Azure, Google Cloud, and more. @@ -19,19 +17,19 @@ In this guide, you will learn how to configure Pulumi to interact with the Snowf To use Pulumi with the Snowflake emulator, you need to configure the Snowflake provider in your Pulumi configuration file. Create a blank Pulumi project, and add the following environment variables to your Pulumi stack: -{{< command>}} -$ pulumi config set snowflake:account test -$ pulumi config set snowflake:region test -$ pulumi config set snowflake:username test -$ pulumi config set snowflake:password test -$ pulumi config set snowflake:host snowflake.localhost.localstack.cloud -{{< /command >}} +```bash +pulumi config set snowflake:account test +pulumi config set snowflake:region test +pulumi config set snowflake:username test +pulumi config set snowflake:password test +pulumi config set snowflake:host snowflake.localhost.localstack.cloud +``` You can install the Snowflake provider in any of the programming languages supported by Pulumi, such as Python, JavaScript, TypeScript, and Go. 
The following example shows how to install the Snowflake provider for your TypeScript project: -{{< command >}} -$ npm install @pulumi/snowflake -{{< /command >}} +```bash +npm install @pulumi/snowflake +``` ### Create Snowflake resources @@ -50,9 +48,9 @@ const simple = new snowflake.Database("simple", { You can now deploy the Pulumi configuration to create the Snowflake resources locally. Run the following command to deploy the Pulumi configuration: -{{< command >}} -$ pulumi up -{{< /command >}} +```bash +pulumi up +``` The expected output should show the resources being created in the Snowflake emulator: @@ -75,4 +73,4 @@ Resources: + 2 created Duration: 5s -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/integrations/snow-cli.md b/src/content/docs/snowflake/integrations/snow-cli.mdx similarity index 53% rename from src/content/docs/snowflake/integrations/snow-cli.md rename to src/content/docs/snowflake/integrations/snow-cli.mdx index 6bd1196d..dd4a2057 100644 --- a/src/content/docs/snowflake/integrations/snow-cli.md +++ b/src/content/docs/snowflake/integrations/snow-cli.mdx @@ -1,52 +1,52 @@ --- -title: Snow CLI -description: Use Snow CLI to interact with the Snowflake emulator. +title: Snowflake CLI +description: Use Snowflake CLI to interact with the Snowflake emulator. --- - +import { Tabs, TabItem } from '@astrojs/starlight/components'; ## Introduction -Snow CLI is a command-line interface (CLI) for Snowflake. You can use Snow CLI to interact with the Snowflake emulator. Snow CLI provides a set of commands to manage and interact with Snowflake accounts, databases, warehouses, and more. - -You can connect Snow CLI to the Snowflake emulator using a connection profile. A connection profile is a set of parameters that define the connection to a Snowflake account. You can create, list, and test connection profiles using Snow CLI. +Snowflake CLI is a command-line interface (CLI) for Snowflake. 
You can use Snowflake CLI to interact with the Snowflake emulator. Snowflake CLI provides a set of commands to manage and interact with Snowflake accounts, databases, warehouses, and more. -{{}} -Snow CLI is still under [active development](https://docs.snowflake.com/LIMITEDACCESS/snowcli/snowcli-guide), hence the commands and features might change in future releases. -{{}} +You can connect Snowflake CLI to the Snowflake emulator using a connection profile. A connection profile is a set of parameters that define the connection to a Snowflake account. You can create, list, and test connection profiles using Snowflake CLI. ## Installation -You can install Snow CLI using the following methods: +You can install Snowflake CLI using the following methods: -{{< tabpane >}} -{{< tab header="PyPI" lang="bash" >}} + + +```bash pip install snowflake-cli-labs snow --help -{{< /tab >}} -{{< tab header="Homebrew" lang="bash" >}} +``` + + +```bash brew tap Snowflake-Labs/snowflake-cli brew install snowcli snow --help -{{< /tab >}} -{{< /tabpane >}} +``` + + -## Configuring Snow CLI +## Configuring Snowflake CLI -In this guide, you will learn how to configure Snow CLI to interact with the Snowflake emulator using a `localstack` connection profile. +In this guide, you will learn how to configure Snowflake CLI to interact with the Snowflake emulator using a `localstack` connection profile. 
### Create a connection profile -To configure Snow CLI to interact with the Snowflake emulator, create a connection profile using the following command: +To configure Snowflake CLI to interact with the Snowflake emulator, create a connection profile using the following command: -{{< command >}} -$ snow connection add \ +```bash +snow connection add \ --connection-name localstack \ --user test \ --password test \ --account test \ --host snowflake.localhost.localstack.cloud -{{< / command >}} +``` You might be prompted to enter more optional parameters, such as the connection port, database name, warehouse name, authentication method, and more. These are however optional and can be skipped. @@ -54,11 +54,11 @@ After a successful configuration, you can the `localstack` connection profile is ### List your connection profiles -To list all the connection profiles configured in Snow CLI, execute the following command: +To list all the connection profiles configured in Snowflake CLI, execute the following command: -{{< command >}} -$ snow connection list -{{< / command >}} +```bash +snow connection list +``` The output should be: @@ -75,9 +75,9 @@ The output should be: To test the connection to the Snowflake emulator, execute the following command: -{{< command >}} -$ snow connection test --connection localstack -{{< / command >}} +```bash +snow connection test --connection localstack +``` The output should be: @@ -97,18 +97,18 @@ The output should be: To run a query using the connection profile, execute the following command: -{{< command >}} -$ snow sql --query "CREATE DATABASE mytestdb;" --connection localstack -{{< / command >}} +```bash +snow sql --query "CREATE DATABASE mytestdb;" --connection localstack +``` You can see all the databases in your Snowflake emulator using the following command: -{{< command >}} -$ snow sql --query "SHOW DATABASES;" --connection localstack -{{< / command >}} +```bash +snow sql --query "SHOW DATABASES;" --connection localstack +``` You 
can create a schema using the following commands: -{{< command >}} -$ snow sql --query "CREATE SCHEMA mytestdb.mytestschema;" --connection localstack -{{< / command >}} \ No newline at end of file +```bash +snow sql --query "CREATE SCHEMA mytestdb.mytestschema;" --connection localstack +``` diff --git a/src/content/docs/snowflake/integrations/snow-sql.md b/src/content/docs/snowflake/integrations/snow-sql.md index 21834c50..b6718c47 100644 --- a/src/content/docs/snowflake/integrations/snow-sql.md +++ b/src/content/docs/snowflake/integrations/snow-sql.md @@ -3,8 +3,6 @@ title: SnowSQL description: Use SnowSQL to interact with the Snowflake emulator --- - - ## Introduction [SnowSQL](https://docs.snowflake.com/en/user-guide/snowsql.html) is a command-line client for Snowflake that allows you to interact with the Snowflake service using SQL commands. SnowSQL provides a wide range of features, such as executing SQL statements, loading data, unloading data, and more. @@ -23,7 +21,7 @@ To install SnowSQL, follow the instructions in the [official SnowSQL documentati To start SnowSQL, execute the following command: -{{< command >}} +```bash $ export SNOWSQL_PWD=test $ snowsql \ -a test \ @@ -34,7 +32,7 @@ $ snowsql \ -w test \ -r test \ -s test -{{< / command >}} +``` In the above command: @@ -59,14 +57,12 @@ test#test@test.test> You can execute SQL commands using SnowSQL. For example, to create a new database, execute the following command: -{{< command >}} +```bash $ CREATE DATABASE test_db; - +----------------------------------------+ | status | |----------------------------------------| | Database TEST_DB successfully created. | +----------------------------------------+ 0 Row(s) produced. 
Time Elapsed: 0.198s - -{{< / command >}} \ No newline at end of file +``` \ No newline at end of file diff --git a/src/content/docs/snowflake/integrations/snowflake-drivers.md b/src/content/docs/snowflake/integrations/snowflake-drivers.md index 70c66683..d2ff9604 100644 --- a/src/content/docs/snowflake/integrations/snowflake-drivers.md +++ b/src/content/docs/snowflake/integrations/snowflake-drivers.md @@ -3,8 +3,6 @@ title: Snowflake Drivers description: Get started with Snowflake Drivers in LocalStack for Snowflake --- - - ## Introduction Snowflake Drivers enable the use of programming languages like Go, C#, and Python for developing applications that interact with Snowflake. The Snowflake emulator facilitates testing Snowflake integration without connecting to the actual Snowflake instance. This guide provides instructions on connecting the Snowflake emulator with various drivers. @@ -15,9 +13,9 @@ The Snowflake Connector for Python (`snowflake-connector-python`) is a Python li To install the Snowflake Connector for Python, execute the following command: -{{< command >}} -$ pip install snowflake-connector-python -{{< /command >}} +```bash +pip install snowflake-connector-python +``` The Snowflake emulator operates on `snowflake.localhost.localstack.cloud` - note that this is a DNS name that resolves to a local IP address (`127.0.0.1`) to make sure the connector interacts with the local APIs. Connect to the emulator using the following Python code: @@ -48,9 +46,9 @@ The Snowflake Node.js driver facilitates connecting to Snowflake and executing o To install the Snowflake Node.js driver, execute the following command: -{{< command >}} -$ npm install snowflake-sdk -{{< /command >}} +```bash +npm install snowflake-sdk +``` The Snowflake emulator runs on `snowflake.localhost.localstack.cloud`. 
Connect to the emulator using the following JavaScript code: @@ -95,9 +93,9 @@ The Go Snowflake driver provides a way to connect to Snowflake and perform datab To install the Go Snowflake driver, execute the following command: -{{< command >}} -$ go get github.com/snowflakedb/gosnowflake -{{< /command >}} +```bash +go get github.com/snowflakedb/gosnowflake +``` The connection string follows the format `username:password@host:port/database?account=account_name`. For the emulator use: `test:test@snowflake.localhost.localstack.cloud:4566/test?account=test` @@ -152,4 +150,4 @@ func main() { log.Fatalf("Error iterating rows: %v", err) } } -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/integrations/snowpark.md b/src/content/docs/snowflake/integrations/snowpark.md index 663ba50b..d2748119 100644 --- a/src/content/docs/snowflake/integrations/snowpark.md +++ b/src/content/docs/snowflake/integrations/snowpark.md @@ -3,8 +3,6 @@ title: Snowpark description: Get started with Snowpark in LocalStack for Snowflake --- - - ## Introduction Snowpark library is a developer library for querying and processing data at scale in Snowflake. Snowflake currently provides Snowpark libraries for three languages: Java, Python, and Scala. The Snowflake emulator facilitates testing Snowpark queries without connecting to the actual Snowflake instance. This guide provides instructions on using the Snowflake emulator in conjunction with Snowpark. @@ -13,9 +11,9 @@ Snowpark library is a developer library for querying and processing data at scal The Snowflake emulator supports the development and testing of Snowpark Python code in a local development environment. 
You can install the Snowpark Python library using the following command: -{{< command >}} -$ pip install snowflake-snowpark-python -{{< /command >}} +```bash +pip install snowflake-snowpark-python +``` In this getting started guide, we'll use the Snowpark Python library to establish a connection to the Snowflake emulator and employ a DataFrame to query a table named `sample_product_data`. @@ -150,4 +148,4 @@ The following output should be displayed: |9 |Product 3B |prod-3-B | |10 |Product 4 |prod-4 | --------------------------------------- -``` \ No newline at end of file +``` diff --git a/src/content/docs/snowflake/integrations/terraform.md b/src/content/docs/snowflake/integrations/terraform.md index fcb44b7a..253e9142 100644 --- a/src/content/docs/snowflake/integrations/terraform.md +++ b/src/content/docs/snowflake/integrations/terraform.md @@ -3,8 +3,6 @@ title: Terraform description: Use Terraform to interact with the Snowflake emulator --- - - ## Introduction [Terraform](https://terraform.io/) is an Infrastructure-as-Code (IaC) framework developed by HashiCorp. It enables users to define and provision infrastructure using a high-level configuration language. Terraform uses HashiCorp Configuration Language (HCL) as its configuration syntax. @@ -38,12 +36,13 @@ provider "snowflake" { } ``` -{{< alert type="info" title="Note" >}} +:::note Instead of manually specifying the `host`, you can export the `SNOWFLAKE_HOST` environment variable to set the Snowflake host. 
Here is an example: -{{< command >}} -$ export SNOWFLAKE_HOST=snowflake.localhost.localstack.cloud -{{< /command >}} -{{< /alert >}} + +```bash +export SNOWFLAKE_HOST=snowflake.localhost.localstack.cloud +``` +::: ### Create Snowflake resources @@ -61,9 +60,9 @@ resource "snowflake_database" "example" { You can now deploy the Terraform configuration using the following command: -{{< command >}} -$ terraform init -$ terraform apply -{{< /command >}} +```bash +terraform init +terraform apply +``` -The `terraform init` command initializes the Terraform configuration, and the `terraform apply` command creates the Snowflake database. \ No newline at end of file +The `terraform init` command initializes the Terraform configuration, and the `terraform apply` command creates the Snowflake database. diff --git a/src/content/docs/snowflake/sql-functions.md b/src/content/docs/snowflake/sql-functions.md index 922da457..be0bd6cb 100644 --- a/src/content/docs/snowflake/sql-functions.md +++ b/src/content/docs/snowflake/sql-functions.md @@ -4,8 +4,825 @@ description: This is a dummy description template: doc --- -# SQL Functions Coverage +This table provides a list of all Snowflake system-defined SQL functions, scalar or table, emulated by LocalStack. +The content will be updated as additional query features and functions are implemented. -Browse implemented SQL functions coverage in the following table. - -Look up all Snowflake system-defined SQL functions, scalar or table, emulated by LocalStack. 
\ No newline at end of file +|Function|Supported| +|----|----| +|ABS|✅| +|ACOS|✅| +|ACOSH|✅| +|ADD_MONTHS|✅| +|AI_AGG|❓| +|AI_CLASSIFY|❓| +|AI_COMPLETE (Prompt object)|❓| +|AI_COMPLETE (Single image)|❓| +|AI_COMPLETE (Single string)|❓| +|AI_COMPLETE|❓| +|AI_FILTER|❓| +|AI_SIMILARITY|❓| +|AI_SUMMARIZE_AGG|❓| +|ALERT_HISTORY|❓| +|ALL_USER_NAMES|❓| +|ANY_VALUE|✅| +|APPLICATION_JSON|❓| +|APPROX_COUNT_DISTINCT|❓| +|APPROX_PERCENTILE|❓| +|APPROX_PERCENTILE_ACCUMULATE|❓| +|APPROX_PERCENTILE_COMBINE|❓| +|APPROX_PERCENTILE_ESTIMATE|❓| +|APPROX_TOP_K|❓| +|APPROX_TOP_K_ACCUMULATE|❓| +|APPROX_TOP_K_COMBINE|❓| +|APPROX_TOP_K_ESTIMATE|❓| +|APPROXIMATE_JACCARD_INDEX|❓| +|APPROXIMATE_SIMILARITY|❓| +|ARRAY_AGG|✅| +|ARRAY_APPEND|✅| +|ARRAY_CAT|✅| +|ARRAY_COMPACT|✅| +|ARRAY_CONSTRUCT|✅| +|ARRAY_CONSTRUCT_COMPACT|✅| +|ARRAY_CONTAINS|✅| +|ARRAY_DISTINCT|✅| +|ARRAY_EXCEPT|✅| +|ARRAY_FLATTEN|✅| +|ARRAY_GENERATE_RANGE|✅| +|ARRAY_INSERT|✅| +|ARRAY_INTERSECTION|✅| +|ARRAY_MAX|✅| +|ARRAY_MIN|✅| +|ARRAY_POSITION|✅| +|ARRAY_PREPEND|✅| +|ARRAY_REMOVE|✅| +|ARRAY_REMOVE_AT|✅| +|ARRAY_REVERSE|✅| +|ARRAY_SIZE|✅| +|ARRAY_SLICE|✅| +|ARRAY_SORT|✅| +|ARRAY_TO_STRING|✅| +|ARRAY_UNION_AGG|✅| +|ARRAY_UNIQUE_AGG|✅| +|ARRAYS_OVERLAP|✅| +|ARRAYS_TO_OBJECT|✅| +|ARRAYS_ZIP|❓| +|AS_|❓| +|AS_ARRAY|✅| +|AS_BINARY|✅| +|AS_BOOLEAN|✅| +|AS_CHAR, AS_VARCHAR|✅| +|AS_DATE|❓| +|AS_DECIMAL, AS_NUMBER|✅| +|AS_DOUBLE, AS_REAL|✅| +|AS_INTEGER|✅| +|AS_OBJECT|❓| +|AS_TIME|❓| +|AS_TIMESTAMP_*|❓| +|ASCII|✅| +|ASIN|✅| +|ASINH|✅| +|ATAN|✅| +|ATAN2|✅| +|ATANH|✅| +|AUTO_REFRESH_REGISTRATION_HISTORY|❓| +|AUTOMATIC_CLUSTERING_HISTORY|❓| +|AVAILABLE_LISTING_REFRESH_HISTORY|❓| +|AVG|✅| +|BASE64_DECODE_BINARY|❓| +|BASE64_DECODE_STRING|❓| +|BASE64_ENCODE|❓| +|[ NOT ] BETWEEN|❓| +|BIT_LENGTH|❓| +|BITAND|✅| +|BITAND_AGG|❓| +|BITMAP_BIT_POSITION|❓| +|BITMAP_BUCKET_NUMBER|❓| +|BITMAP_CONSTRUCT_AGG|❓| +|BITMAP_COUNT|❓| +|BITMAP_OR_AGG|❓| +|BITNOT|✅| +|BITOR|✅| +|BITOR_AGG|❓| +|BITSHIFTLEFT|✅| +|BITSHIFTRIGHT|✅| +|BITXOR|✅| +|BITXOR_AGG|❓| 
+|BOOLAND|❓| +|BOOLAND_AGG|❓| +|BOOLNOT|❓| +|BOOLOR|❓| +|BOOLOR_AGG|❓| +|BOOLXOR|❓| +|BOOLXOR_AGG|❓| +|BUILD_SCOPED_FILE_URL|❓| +|BUILD_STAGE_FILE_URL|❓| +|CASE|✅| +|CAST, ::|✅| +|CBRT|✅| +|CEIL|✅| +|CHARINDEX|❓| +|CHECK_JSON|❓| +|CHECK_XML|✅| +|CHR, CHAR|✅| +|CLASSIFY_TEXT (SNOWFLAKE.CORTEX)|❓| +|COALESCE|✅| +|COLLATE|❓| +|COLLATION|❓| +|COMPLETE (SNOWFLAKE.CORTEX) (multimodal)|❓| +|COMPLETE (SNOWFLAKE.CORTEX)|❓| +|COMPLETE_TASK_GRAPHS|❓| +|COMPRESS|❓| +|CONCAT, |||✅| +|CONCAT_WS|❓| +|CONDITIONAL_CHANGE_EVENT|❓| +|CONDITIONAL_TRUE_EVENT|❓| +|CONTAINS|✅| +|CONVERT_TIMEZONE|✅| +|COPY_HISTORY|❓| +|CORR|❓| +|CORTEX_SEARCH_DATA_SCAN|❓| +|COS|✅| +|COSH|✅| +|COT|✅| +|COUNT|✅| +|COUNT_IF|✅| +|COUNT_TOKENS (SNOWFLAKE.CORTEX)|❓| +|COVAR_POP|❓| +|COVAR_SAMP|❓| +|CUME_DIST|❓| +|CUMULATIVE_PRIVACY_LOSSES|❓| +|CURRENT_ACCOUNT|✅| +|CURRENT_ACCOUNT_NAME|✅| +|CURRENT_AVAILABLE_ROLES|✅| +|CURRENT_CLIENT|✅| +|CURRENT_DATABASE|✅| +|CURRENT_DATE|✅| +|CURRENT_IP_ADDRESS|✅| +|CURRENT_ORGANIZATION_NAME|✅| +|CURRENT_ORGANIZATION_USER|❓| +|CURRENT_REGION|✅| +|CURRENT_ROLE|✅| +|CURRENT_ROLE_TYPE|✅| +|CURRENT_SCHEMA|✅| +|CURRENT_SCHEMAS|✅| +|CURRENT_SECONDARY_ROLES|✅| +|CURRENT_SESSION|✅| +|CURRENT_STATEMENT|✅| +|CURRENT_TASK_GRAPHS|❓| +|CURRENT_TIME|✅| +|CURRENT_TIMESTAMP|✅| +|CURRENT_TRANSACTION|✅| +|CURRENT_USER|✅| +|CURRENT_VERSION|✅| +|CURRENT_WAREHOUSE|✅| +|DATA_METRIC_FUNCTION_REFERENCES|❓| +|DATA_QUALITY_MONITORING_RESULTS|❓| +|DATA_TRANSFER_HISTORY|❓| +|DATABASE_REFRESH_HISTORY|❓| +|DATABASE_REFRESH_PROGRESS, DATABASE_REFRESH_PROGRESS_BY_JOB|❓| +|DATABASE_REPLICATION_USAGE_HISTORY|❓| +|DATABASE_STORAGE_USAGE_HISTORY|❓| +|DATASKETCHES_HLL|❓| +|DATASKETCHES_HLL_ACCUMULATE|❓| +|DATASKETCHES_HLL_COMBINE|❓| +|DATASKETCHES_HLL_ESTIMATE|❓| +|DATE_FROM_PARTS|✅| +|DATE_PART|❓| +|DATE_TRUNC|✅| +|DATEADD|✅| +|DATEDIFF|✅| +|DAYNAME|✅| +|DECODE|❓| +|DECOMPRESS_BINARY|❓| +|DECOMPRESS_STRING|❓| +|DECRYPT|❓| +|DECRYPT_RAW|❓| +|DEGREES|✅| +|DENSE_RANK|✅| +|DIV0|✅| +|DIV0NULL|✅| +|AVG (system data 
metric function)|❓| +|BLANK_COUNT (system data metric function)|❓| +|BLANK_PERCENT (system data metric function)|❓| +|DATA_METRIC_SCHEDULED_TIME (system data metric function)|❓| +|DUPLICATE_COUNT (system data metric function)|❓| +|FRESHNESS (system data metric function)|❓| +|MAX (system data metric function)|❓| +|MIN (system data metric function)|❓| +|NULL_COUNT (system data metric function)|❓| +|NULL_PERCENT (system data metric function)|❓| +|ROW_COUNT (system data metric function)|❓| +|STDDEV (system data metric function)|❓| +|UNIQUE_COUNT (system data metric function)|❓| +|DP_INTERVAL_HIGH|❓| +|DP_INTERVAL_LOW|❓| +|DYNAMIC_TABLE_GRAPH_HISTORY|❓| +|DYNAMIC_TABLE_REFRESH_HISTORY|❓| +|DYNAMIC_TABLES|❓| +|EDITDISTANCE|❓| +|EMAIL_INTEGRATION_CONFIG|❓| +|EMBED_TEXT_768 (SNOWFLAKE.CORTEX)|❓| +|EMBED_TEXT_1024 (SNOWFLAKE.CORTEX)|❓| +|ENCRYPT|❓| +|ENCRYPT_RAW|❓| +|ENDSWITH|❓| +|ENTITY_SENTIMENT (SNOWFLAKE.CORTEX)|❓| +|EQUAL_NULL|✅| +|ESTIMATE_REMAINING_DP_AGGREGATES|❓| +|EXP|✅| +|EXPLAIN_JSON|❓| +|EXTERNAL_FUNCTIONS_HISTORY|❓| +|EXTERNAL_TABLE_FILES|❓| +|EXTERNAL_TABLE_FILE_REGISTRATION_HISTORY|❓| +|EXTRACT|❓| +|EXTRACT_ANSWER (SNOWFLAKE.CORTEX)|❓| +|EXTRACT_SEMANTIC_CATEGORIES|❓| +|FACTORIAL|❓| +|FILTER|❓| +|FINETUNE ('CANCEL') (SNOWFLAKE.CORTEX)|❓| +|FINETUNE ('CREATE') (SNOWFLAKE.CORTEX)|❓| +|FINETUNE ('DESCRIBE') (SNOWFLAKE.CORTEX)|❓| +|FINETUNE ('SHOW') (SNOWFLAKE.CORTEX)|❓| +|FINETUNE (SNOWFLAKE.CORTEX)|❓| +|FIRST_VALUE|❓| +|FL_GET_CONTENT_TYPE|❓| +|FL_GET_ETAG|❓| +|FL_GET_FILE_TYPE|❓| +|FL_GET_LAST_MODIFIED|❓| +|FL_GET_RELATIVE_PATH|❓| +|FL_GET_SCOPED_FILE_URL|❓| +|FL_GET_SIZE|❓| +|FL_GET_STAGE|❓| +|FL_GET_STAGE_FILE_URL|❓| +|FL_IS_AUDIO|❓| +|FL_IS_COMPRESSED|❓| +|FL_IS_DOCUMENT|❓| +|FL_IS_IMAGE|❓| +|FL_IS_VIDEO|❓| +|FLATTEN|✅| +|FLOOR|✅| +|GENERATE_COLUMN_DESCRIPTION|❓| +|GENERATOR|✅| +|GET|✅| +|GET_ABSOLUTE_PATH|❓| +|GET_ANACONDA_PACKAGES_REPODATA|❓| +|GET_CONDITION_QUERY_UUID|❓| +|GET_CONTACTS|❓| +|GET_DDL|❓| +|GET_IGNORE_CASE|❓| +|GET_LINEAGE 
(SNOWFLAKE.CORE)|❓| +|GET_OBJECT_REFERENCES|❓| +|GET_PATH, :|✅| +|GET_PRESIGNED_URL|❓| +|GET_PYTHON_PROFILER_OUTPUT (SNOWFLAKE.CORE)|❓| +|GET_QUERY_OPERATOR_STATS|❓| +|GET_RELATIVE_PATH|❓| +|GET_STAGE_LOCATION|❓| +|GETBIT|❓| +|GETDATE|❓| +|GETVARIABLE|❓| +|GREATEST|✅| +|GREATEST_IGNORE_NULLS|✅| +|GROUPING|❓| +|GROUPING_ID|❓| +|H3_CELL_TO_BOUNDARY|❓| +|H3_CELL_TO_CHILDREN|❓| +|H3_CELL_TO_CHILDREN_STRING|❓| +|H3_CELL_TO_PARENT|❓| +|H3_CELL_TO_POINT|❓| +|H3_COMPACT_CELLS|❓| +|H3_COMPACT_CELLS_STRINGS|❓| +|H3_COVERAGE|❓| +|H3_COVERAGE_STRINGS|❓| +|H3_GET_RESOLUTION|❓| +|H3_GRID_DISK|❓| +|H3_GRID_DISTANCE|❓| +|H3_GRID_PATH|❓| +|H3_INT_TO_STRING|❓| +|H3_IS_PENTAGON|❓| +|H3_IS_VALID_CELL|❓| +|H3_LATLNG_TO_CELL|❓| +|H3_LATLNG_TO_CELL_STRING|❓| +|H3_POINT_TO_CELL|❓| +|H3_POINT_TO_CELL_STRING|❓| +|H3_POLYGON_TO_CELLS|❓| +|H3_POLYGON_TO_CELLS_STRINGS|❓| +|H3_STRING_TO_INT|❓| +|H3_TRY_COVERAGE|❓| +|H3_TRY_COVERAGE_STRINGS|❓| +|H3_TRY_GRID_DISTANCE|❓| +|H3_TRY_GRID_PATH|❓| +|H3_TRY_POLYGON_TO_CELLS|❓| +|H3_TRY_POLYGON_TO_CELLS_STRINGS|❓| +|H3_UNCOMPACT_CELLS|❓| +|H3_UNCOMPACT_CELLS_STRINGS|❓| +|HASH|✅| +|HASH_AGG|❓| +|HAVERSINE|❓| +|HEX_DECODE_BINARY|❓| +|HEX_DECODE_STRING|❓| +|HEX_ENCODE|❓| +|HLL|❓| +|HLL_ACCUMULATE|❓| +|HLL_COMBINE|❓| +|HLL_ESTIMATE|❓| +|HLL_EXPORT|❓| +|HLL_IMPORT|❓| +|HOUR / MINUTE / SECOND|❓| +|ICEBERG_TABLE_FILES|❓| +|ICEBERG_TABLE_SNAPSHOT_REFRESH_HISTORY|❓| +|IFF|✅| +|IFNULL|✅| +|[ NOT ] ILIKE|❓| +|ILIKE ANY|❓| +|[ NOT ] IN|❓| +|INFER_SCHEMA|✅| +|INITCAP|✅| +|INSERT|✅| +|INTEGRATION|❓| +|INVOKER_ROLE|❓| +|INVOKER_SHARE|❓| +|IS [ NOT ] DISTINCT FROM|❓| +|IS [ NOT ] NULL|❓| +|IS_|❓| +|IS_APPLICATION_ROLE_IN_SESSION|❓| +|IS_ARRAY|✅| +|IS_BINARY|✅| +|IS_BOOLEAN|✅| +|IS_CHAR, IS_VARCHAR|✅| +|IS_DATABASE_ROLE_IN_SESSION|❓| +|IS_DATE, IS_DATE_VALUE|✅| +|IS_DECIMAL|✅| +|IS_DOUBLE, IS_REAL|✅| +|IS_GRANTED_TO_INVOKER_ROLE|❓| +|IS_INSTANCE_ROLE_IN_SESSION|❓| +|IS_INTEGER|✅| +|IS_NULL_VALUE|✅| +|IS_OBJECT|✅| +|IS_ORGANIZATION_USER|❓| +|IS_ORGANIZATION_USER_GROUP|❓| 
+|IS_ORGANIZATION_USER_GROUP_IN_SESSION|❓| +|IS_ROLE_IN_SESSION|❓| +|IS_TIME|✅| +|IS_TIMESTAMP_*|❓| +|JAROWINKLER_SIMILARITY|❓| +|JSON_EXTRACT_PATH_TEXT|❓| +|KURTOSIS|❓| +|LAG|❓| +|LAST_DAY|✅| +|LAST_QUERY_ID|✅| +|LAST_SUCCESSFUL_SCHEDULED_TIME|❓| +|LAST_TRANSACTION|❓| +|LAST_VALUE|❓| +|LEAD|❓| +|LEAST|✅| +|LEAST_IGNORE_NULLS|✅| +|LEFT|❓| +|LENGTH, LEN|✅| +|[ NOT ] LIKE|❓| +|LIKE ALL|❓| +|LIKE ANY|❓| +|LISTAGG|✅| +|LISTING_REFRESH_HISTORY|❓| +|LN|✅| +|LOCALTIME|❓| +|LOCALTIMESTAMP|❓| +|LOG|✅| +|LOGIN_HISTORY, LOGIN_HISTORY_BY_USER|❓| +|LOWER|✅| +|LPAD|❓| +|LTRIM|✅| +|MAP_CAT|✅| +|MAP_CONTAINS_KEY|✅| +|MAP_DELETE|✅| +|MAP_INSERT|✅| +|MAP_KEYS|✅| +|MAP_PICK|✅| +|MAP_SIZE|✅| +|MATERIALIZED_VIEW_REFRESH_HISTORY|❓| +|MAX|✅| +|MAX_BY|✅| +|MD5, MD5_HEX|❓| +|MD5_BINARY|❓| +|MD5_NUMBER — Obsoleted|❓| +|MD5_NUMBER_LOWER64|❓| +|MD5_NUMBER_UPPER64|❓| +|MEDIAN|❓| +|MIN|✅| +|MIN_BY|✅| +|MINHASH|❓| +|MINHASH_COMBINE|❓| +|MOD|✅| +|MODE|✅| +|MODEL_MONITOR_DRIFT_METRIC|❓| +|MODEL_MONITOR_PERFORMANCE_METRIC|❓| +|MODEL_MONITOR_STAT_METRIC|❓| +|MONTHNAME|✅| +|MONTHS_BETWEEN|❓| +|NETWORK_RULE_REFERENCES|❓| +|NEXT_DAY|❓| +|NORMAL|❓| +|NOTIFICATION_HISTORY|❓| +|NTH_VALUE|❓| +|NTILE|❓| +|NULLIF|✅| +|NULLIFZERO|✅| +|NVL|✅| +|NVL2|✅| +|OBJECT_AGG|❓| +|OBJECT_CONSTRUCT|✅| +|OBJECT_CONSTRUCT_KEEP_NULL|✅| +|OBJECT_DELETE|✅| +|OBJECT_INSERT|✅| +|OBJECT_KEYS|✅| +|OBJECT_PICK|❓| +|OCTET_LENGTH|❓| +|PARSE_DOCUMENT (SNOWFLAKE.CORTEX)|❓| +|PARSE_IP|✅| +|PARSE_JSON|✅| +|PARSE_URL|❓| +|PARSE_XML|❓| +|PERCENT_RANK|❓| +|PERCENTILE_CONT|❓| +|PERCENTILE_DISC|❓| +|PI|✅| +|PIPE_USAGE_HISTORY|❓| +|POLICY_CONTEXT|❓| +|POLICY_REFERENCES|❓| +|POSITION|❓| +|POW, POWER|✅| +|PREVIOUS_DAY|❓| +|PROMPT function|❓| +|QUERY_ACCELERATION_HISTORY|❓| +|QUERY_HISTORY, QUERY_HISTORY_BY_*|❓| +|RADIANS|✅| +|RANDOM|✅| +|RANDSTR|✅| +|RANK|✅| +|RATIO_TO_REPORT|✅| +|REDUCE|❓| +|[ NOT ] REGEXP|❓| +|REGEXP_COUNT|❓| +|REGEXP_INSTR|❓| +|REGEXP_LIKE|❓| +|REGEXP_REPLACE|✅| +|REGEXP_SUBSTR|✅| +|REGEXP_SUBSTR_ALL|❓| +|REGR_AVGX|❓| 
+|REGR_AVGY|❓| +|REGR_COUNT|❓| +|REGR_INTERCEPT|❓| +|REGR_R2|❓| +|REGR_SLOPE|❓| +|REGR_SXX|❓| +|REGR_SXY|❓| +|REGR_SYY|❓| +|REGR_VALX|❓| +|REGR_VALY|❓| +|REPEAT|✅| +|REPLACE|✅| +|REPLICATION_GROUP_REFRESH_HISTORY, REPLICATION_GROUP_REFRESH_HISTORY_ALL|❓| +|REPLICATION_GROUP_REFRESH_PROGRESS, REPLICATION_GROUP_REFRESH_PROGRESS_BY_JOB, REPLICATION_GROUP_REFRESH_PROGRESS_ALL|❓| +|REPLICATION_GROUP_USAGE_HISTORY|❓| +|REPLICATION_USAGE_HISTORY|❓| +|REST_EVENT_HISTORY|❓| +|RESULT_SCAN|✅| +|REVERSE|✅| +|RIGHT|✅| +|[ NOT ] RLIKE|❓| +|ROUND|❓| +|ROW_NUMBER|✅| +|RPAD|❓| +|RTRIM|✅| +|RTRIMMED_LENGTH|❓| +|SANITIZE_WEBHOOK_CONTENT|❓| +|SCHEDULED_TIME|❓| +|SEARCH|❓| +|SEARCH_IP|❓| +|SEARCH_OPTIMIZATION_HISTORY|❓| +|SEARCH_PREVIEW (SNOWFLAKE.CORTEX)|❓| +|SENTIMENT (SNOWFLAKE.CORTEX)|❓| +|SEQ1 / SEQ2 / SEQ4 / SEQ8|❓| +|SERVERLESS_ALERT_HISTORY|❓| +|SERVERLESS_TASK_HISTORY|❓| +|SHA1, SHA1_HEX|❓| +|SHA1_BINARY|❓| +|SHA2, SHA2_HEX|❓| +|SHA2_BINARY|❓| +|SHOW_PYTHON_PACKAGES_DEPENDENCIES|❓| +|SIGN|✅| +|SIN|✅| +|SINH|✅| +|SKEW|❓| +|SOUNDEX|❓| +|SOUNDEX_P123|❓| +|SPACE|❓| +|SPLIT|✅| +|SPLIT_PART|❓| +|SPLIT_TEXT_RECURSIVE_CHARACTER (SNOWFLAKE.CORTEX)|❓| +|SPLIT_TO_TABLE|❓| +|SQRT|✅| +|SQUARE|❓| +|ST_AREA|❓| +|ST_ASEWKB|❓| +|ST_ASEWKT|❓| +|ST_ASGEOJSON|❓| +|ST_ASWKB, ST_ASBINARY|❓| +|ST_ASWKT, ST_ASTEXT|❓| +|ST_AZIMUTH|❓| +|ST_BUFFER|❓| +|ST_CENTROID|❓| +|ST_COLLECT|❓| +|ST_CONTAINS|❓| +|ST_COVEREDBY|❓| +|ST_COVERS|❓| +|ST_DIFFERENCE|❓| +|ST_DIMENSION|❓| +|ST_DISJOINT|❓| +|ST_DISTANCE|❓| +|ST_DWITHIN|❓| +|ST_ENDPOINT|❓| +|ST_ENVELOPE|❓| +|ST_GEOGFROMGEOHASH|❓| +|ST_GEOGPOINTFROMGEOHASH|❓| +|ST_GEOGRAPHYFROMWKB|❓| +|ST_GEOGRAPHYFROMWKT|❓| +|ST_GEOHASH|❓| +|ST_GEOMETRYFROMWKB|❓| +|ST_GEOMETRYFROMWKT|❓| +|ST_GEOMFROMGEOHASH|❓| +|ST_GEOMPOINTFROMGEOHASH|❓| +|ST_HAUSDORFFDISTANCE|❓| +|ST_INTERPOLATE|❓| +|ST_INTERSECTION|❓| +|ST_INTERSECTION_AGG|❓| +|ST_INTERSECTS|❓| +|ST_ISVALID|❓| +|ST_LENGTH|❓| +|ST_MAKEGEOMPOINT, ST_GEOMPOINT|❓| +|ST_MAKELINE|❓| +|ST_MAKEPOINT, ST_POINT|❓| +|ST_MAKEPOLYGON, 
ST_POLYGON|❓| +|ST_MAKEPOLYGONORIENTED|❓| +|ST_NPOINTS, ST_NUMPOINTS|❓| +|ST_PERIMETER|❓| +|ST_POINTN|❓| +|ST_SETSRID|❓| +|ST_SIMPLIFY|❓| +|ST_SRID|❓| +|ST_STARTPOINT|❓| +|ST_SYMDIFFERENCE|❓| +|ST_TRANSFORM|❓| +|ST_UNION|❓| +|ST_UNION_AGG|❓| +|ST_WITHIN|❓| +|ST_X|❓| +|ST_XMAX|❓| +|ST_XMIN|❓| +|ST_Y|❓| +|ST_YMAX|❓| +|ST_YMIN|❓| +|STAGE_DIRECTORY_FILE_REGISTRATION_HISTORY|❓| +|STAGE_STORAGE_USAGE_HISTORY|❓| +|STARTSWITH|❓| +|STDDEV, STDDEV_SAMP|❓| +|STDDEV_POP|❓| +|STRIP_NULL_VALUE|❓| +|STRTOK|❓| +|STRTOK_SPLIT_TO_TABLE|❓| +|STRTOK_TO_ARRAY|❓| +|SUBSTR, SUBSTRING|❓| +|SUM|✅| +|SUMMARIZE (SNOWFLAKE.CORTEX)|❓| +|SYSDATE|❓| +|SYSTEM$ABORT_SESSION|❓| +|SYSTEM$ABORT_TRANSACTION|❓| +|SYSTEM$ADD_EVENT (for Snowflake Scripting)|❓| +|SYSTEM$ADD_REFERENCE|❓| +|SYSTEM$ALLOWLIST|❓| +|SYSTEM$ALLOWLIST_PRIVATELINK|❓| +|SYSTEM$APPLICATION_GET_LOG_LEVEL|❓| +|SYSTEM$APPLICATION_GET_METRIC_LEVEL|❓| +|SYSTEM$APPLICATION_GET_TRACE_LEVEL|❓| +|SYSTEM$AUTHORIZE_PRIVATELINK|❓| +|SYSTEM$AUTHORIZE_STAGE_PRIVATELINK_ACCESS|❓| +|SYSTEM$AUTO_REFRESH_STATUS|❓| +|SYSTEM$BEGIN_DEBUG_APPLICATION|❓| +|SYSTEM$BEHAVIOR_CHANGE_BUNDLE_STATUS|❓| +|SYSTEM$BLOCK_INTERNAL_STAGES_PUBLIC_ACCESS|❓| +|SYSTEM$CANCEL_ALL_QUERIES|✅| +|SYSTEM$CANCEL_QUERY|❓| +|SYSTEM$CLEANUP_DATABASE_ROLE_GRANTS|❓| +|SYSTEM$CLIENT_VERSION_INFO|✅| +|SYSTEM$CLUSTERING_DEPTH|❓| +|SYSTEM$CLUSTERING_INFORMATION|❓| +|SYSTEM$CLUSTERING_RATIO — Deprecated|❓| +|SYSTEM$COMMIT_MOVE_ORGANIZATION_ACCOUNT|❓| +|SYSTEM$CONVERT_PIPES_SQS_TO_SNS|❓| +|SYSTEM$CREATE_BILLING_EVENT|❓| +|SYSTEM$CREATE_BILLING_EVENTS|❓| +|SYSTEM$CURRENT_USER_TASK_NAME|❓| +|SYSTEM$DATA_METRIC_SCAN|❓| +|SYSTEM$DATABASE_REFRESH_HISTORY — Deprecated|❓| +|SYSTEM$DATABASE_REFRESH_PROGRESS, SYSTEM$DATABASE_REFRESH_PROGRESS_BY_JOB — Deprecated|❓| +|SYSTEM$DECODE_PAT|❓| +|SYSTEM$DEPROVISION_PRIVATELINK_ENDPOINT|❓| +|SYSTEM$DISABLE_BEHAVIOR_CHANGE_BUNDLE|❓| +|SYSTEM$DISABLE_DATABASE_REPLICATION|❓| +|SYSTEM$DISABLE_GLOBAL_DATA_SHARING_FOR_ACCOUNT|❓| +|SYSTEM$DISABLE_PREVIEW_ACCESS|❓| 
+|SYSTEM$ENABLE_BEHAVIOR_CHANGE_BUNDLE|❓| +|SYSTEM$ENABLE_GLOBAL_DATA_SHARING_FOR_ACCOUNT|❓| +|SYSTEM$ENABLE_PREVIEW_ACCESS|❓| +|SYSTEM$END_DEBUG_APPLICATION|❓| +|SYSTEM$ESTIMATE_AUTOMATIC_CLUSTERING_COSTS|❓| +|SYSTEM$ESTIMATE_QUERY_ACCELERATION|❓| +|SYSTEM$ESTIMATE_SEARCH_OPTIMIZATION_COSTS|❓| +|SYSTEM$EXPLAIN_JSON_TO_TEXT|❓| +|SYSTEM$EXPLAIN_PLAN_JSON|❓| +|SYSTEM$EXTERNAL_TABLE_PIPE_STATUS|❓| +|SYSTEM$FINISH_OAUTH_FLOW|❓| +|SYSTEM$GENERATE_SAML_CSR|❓| +|SYSTEM$GENERATE_SCIM_ACCESS_TOKEN|❓| +|SYSTEM$GET_ALL_REFERENCES|❓| +|SYSTEM$GET_AWS_SNS_IAM_POLICY|❓| +|SYSTEM$GET_CLASSIFICATION_RESULT|❓| +|SYSTEM$GET_CMK_AKV_CONSENT_URL|❓| +|SYSTEM$GET_CMK_CONFIG|❓| +|SYSTEM$GET_CMK_INFO|❓| +|SYSTEM$GET_CMK_KMS_KEY_POLICY|❓| +|SYSTEM$GET_COMPUTE_POOL_PENDING_MAINTENANCE|❓| +|SYSTEM$GET_DEBUG_STATUS|❓| +|SYSTEM$GET_DIRECTORY_TABLE_STATUS|❓| +|SYSTEM$GET_GCP_KMS_CMK_GRANT_ACCESS_CMD|❓| +|SYSTEM$GET_HASH_FOR_APPLICATION|❓| +|SYSTEM$GET_ICEBERG_TABLE_INFORMATION|❓| +|SYSTEM$GET_LOGIN_FAILURE_DETAILS|❓| +|SYSTEM$GET_PREDECESSOR_RETURN_VALUE|❓| +|SYSTEM$GET_PREVIEW_ACCESS_STATUS|❓| +|SYSTEM$GET_PRIVATELINK|❓| +|SYSTEM$GET_PRIVATELINK_AUTHORIZED_ENDPOINTS|❓| +|SYSTEM$GET_PRIVATELINK_CONFIG|❓| +|SYSTEM$GET_PRIVATELINK_ENDPOINT_REGISTRATIONS|❓| +|SYSTEM$GET_PRIVATELINK_ENDPOINTS_INFO|❓| +|SYSTEM$GET_REFERENCED_OBJECT_ID_HASH|❓| +|SYSTEM$GET_RESULTSET_STATUS|❓| +|SYSTEM$GET_SERVICE_DNS_DOMAIN|❓| +|SYSTEM$GET_SERVICE_LOGS|❓| +|SYSTEM$GET_SERVICE_STATUS — Deprecated|❓| +|SYSTEM$GET_SNOWFLAKE_PLATFORM_INFO|❓| +|SYSTEM$GET_TAG|✅| +|SYSTEM$GET_TAG_ALLOWED_VALUES|❓| +|SYSTEM$GET_TAG_ON_CURRENT_COLUMN|❓| +|SYSTEM$GET_TAG_ON_CURRENT_TABLE|❓| +|SYSTEM$GET_TASK_GRAPH_CONFIG|❓| +|SYSTEM$GLOBAL_ACCOUNT_SET_PARAMETER|❓| +|SYSTEM$INITIATE_MOVE_ORGANIZATION_ACCOUNT|❓| +|SYSTEM$INTERNAL_STAGES_PUBLIC_ACCESS_STATUS|❓| +|SYSTEM$IS_APPLICATION_INSTALLED_FROM_SAME_ACCOUNT|❓| +|SYSTEM$IS_APPLICATION_SHARING_EVENTS_WITH_PROVIDER|❓| +|SYSTEM$IS_GLOBAL_DATA_SHARING_ENABLED_FOR_ACCOUNT|❓| 
+|SYSTEM$IS_LISTING_PURCHASED|❓| +|SYSTEM$IS_LISTING_TRIAL|❓| +|SYSTEM$LAST_CHANGE_COMMIT_TIME|❓| +|SYSTEM$LINK_ACCOUNT_OBJECTS_BY_NAME|❓| +|SYSTEM$LINK_ORGANIZATION_USER|❓| +|SYSTEM$LINK_ORGANIZATION_USER_GROUP|❓| +|SYSTEM$LIST_APPLICATION_RESTRICTED_FEATURES|❓| +|SYSTEM$LIST_ICEBERG_TABLES_FROM_CATALOG|❓| +|SYSTEM$LIST_NAMESPACES_FROM_CATALOG|❓| +|SYSTEM$LOG, SYSTEM$LOG_ (for Snowflake Scripting)|✅| +|SYSTEM$MIGRATE_SAML_IDP_REGISTRATION|❓| +|SYSTEM$PIPE_FORCE_RESUME|❓| +|SYSTEM$PIPE_REBINDING_WITH_NOTIFICATION_CHANNEL|❓| +|SYSTEM$PIPE_STATUS|❓| +|SYSTEM$PROVISION_PRIVATELINK_ENDPOINT|❓| +|SYSTEM$QUERY_REFERENCE|❓| +|SYSTEM$REFERENCE|❓| +|SYSTEM$REGISTER_CMK_INFO|❓| +|SYSTEM$REGISTER_PRIVATELINK_ENDPOINT|❓| +|SYSTEM$REGISTRY_LIST_IMAGES — Deprecated|❓| +|SYSTEM$REMOVE_ALL_REFERENCES|❓| +|SYSTEM$REMOVE_REFERENCE|✅| +|SYSTEM$RESTORE_PRIVATELINK_ENDPOINT|❓| +|SYSTEM$REVOKE_PRIVATELINK|❓| +|SYSTEM$REVOKE_STAGE_PRIVATELINK_ACCESS|❓| +|SYSTEM$SCHEDULE_ASYNC_REPLICATION_GROUP_REFRESH|❓| +|SYSTEM$SEND_NOTIFICATIONS_TO_CATALOG|❓| +|SYSTEM$SET_APPLICATION_RESTRICTED_FEATURE_ACCESS|❓| +|SYSTEM$SET_EVENT_SHARING_ACCOUNT_FOR_REGION|❓| +|SYSTEM$SET_REFERENCE|✅| +|SYSTEM$SET_RETURN_VALUE|❓| +|SYSTEM$SET_SPAN_ATTRIBUTES (for Snowflake Scripting)|❓| +|SYSTEM$SHOW_ACTIVE_BEHAVIOR_CHANGE_BUNDLES|❓| +|SYSTEM$SHOW_BUDGETS_FOR_RESOURCE|❓| +|SYSTEM$SHOW_BUDGETS_IN_ACCOUNT|❓| +|SYSTEM$SHOW_EVENT_SHARING_ACCOUNTS|❓| +|SYSTEM$SHOW_MOVE_ORGANIZATION_ACCOUNT_STATUS|❓| +|SYSTEM$SHOW_OAUTH_CLIENT_SECRETS|❓| +|SYSTEM$SNOWPIPE_STREAMING_UPDATE_CHANNEL_OFFSET_TOKEN|❓| +|SYSTEM$START_OAUTH_FLOW|❓| +|SYSTEM$START_USER_EMAIL_VERIFICATION|❓| +|SYSTEM$STREAM_BACKLOG|❓| +|SYSTEM$STREAM_GET_TABLE_TIMESTAMP|❓| +|SYSTEM$STREAM_HAS_DATA|❓| +|SYSTEM$TASK_DEPENDENTS_ENABLE|✅| +|SYSTEM$TASK_RUNTIME_INFO|❓| +|SYSTEM$TRIGGER_LISTING_REFRESH|❓| +|SYSTEM$TYPEOF|❓| +|SYSTEM$UNBLOCK_INTERNAL_STAGES_PUBLIC_ACCESS|❓| +|SYSTEM$UNLINK_ORGANIZATION_USER|❓| +|SYSTEM$UNLINK_ORGANIZATION_USER_GROUP|❓| 
+|SYSTEM$UNREGISTER_PRIVATELINK_ENDPOINT|❓| +|SYSTEM$UNSET_EVENT_SHARING_ACCOUNT_FOR_REGION|❓| +|SYSTEM$USER_TASK_CANCEL_ONGOING_EXECUTIONS|❓| +|SYSTEM$VALIDATE_STORAGE_INTEGRATION|❓| +|SYSTEM$VERIFY_CATALOG_INTEGRATION|❓| +|SYSTEM$VERIFY_CMK_INFO|❓| +|SYSTEM$VERIFY_EXTERNAL_OAUTH_TOKEN|❓| +|SYSTEM$VERIFY_EXTERNAL_VOLUME|❓| +|SYSTEM$WAIT|✅| +|SYSTEM$WAIT_FOR_SERVICES|❓| +|SYSTEM$WHITELIST — Deprecated|❓| +|SYSTEM$WHITELIST_PRIVATELINK — Deprecated|❓| +|SYSTIMESTAMP|❓| +|TAG_REFERENCES|❓| +|TAG_REFERENCES_ALL_COLUMNS|❓| +|TAG_REFERENCES_WITH_LINEAGE|❓| +|TAN|✅| +|TANH|✅| +|TASK_DEPENDENTS|❓| +|TASK_HISTORY|❓| +|TEXT_HTML|❓| +|TEXT_PLAIN|❓| +|TIME_FROM_PARTS|❓| +|TIME_SLICE|❓| +|TIMEADD|✅| +|TIMEDIFF|✅| +|TIMESTAMP_FROM_PARTS|❓| +|TIMESTAMPADD|✅| +|TIMESTAMPDIFF|✅| +|TO_ARRAY|✅| +|TO_BINARY|✅| +|TO_BOOLEAN|✅| +|TO_CHAR, TO_VARCHAR|✅| +|TO_DATE, DATE|✅| +|TO_DECIMAL, TO_NUMBER, TO_NUMERIC|✅| +|TO_DOUBLE|✅| +|TO_FILE|❓| +|TO_GEOGRAPHY|❓| +|TO_GEOMETRY|❓| +|TO_JSON|❓| +|TO_OBJECT|✅| +|TO_QUERY|❓| +|TO_TIME, TIME|✅| +|TO_TIMESTAMP / TO_TIMESTAMP_*|❓| +|TO_VARIANT|✅| +|TO_XML|❓| +|TRANSFORM|❓| +|TRANSLATE (SNOWFLAKE.CORTEX)|❓| +|TRANSLATE|❓| +|TRIM|✅| +|TRUNCATE, TRUNC|❓| +|TRUNCATE, TRUNC|❓| +|TRY_BASE64_DECODE_BINARY|❓| +|TRY_BASE64_DECODE_STRING|❓| +|TRY_CAST|❓| +|TRY_COMPLETE (SNOWFLAKE.CORTEX)|❓| +|TRY_DECRYPT|❓| +|TRY_DECRYPT_RAW|❓| +|TRY_HEX_DECODE_BINARY|❓| +|TRY_HEX_DECODE_STRING|❓| +|TRY_PARSE_JSON|✅| +|TRY_TO_BINARY|❓| +|TRY_TO_BOOLEAN|❓| +|TRY_TO_DATE|❓| +|TRY_TO_DECIMAL, TRY_TO_NUMBER, TRY_TO_NUMERIC|✅| +|TRY_TO_DOUBLE|❓| +|TRY_TO_FILE|❓| +|TRY_TO_GEOGRAPHY|❓| +|TRY_TO_GEOMETRY|❓| +|TRY_TO_TIME|❓| +|TRY_TO_TIMESTAMP / TRY_TO_TIMESTAMP_*|❓| +|TYPEOF|❓| +|UNICODE|❓| +|UNIFORM|❓| +|UPPER|✅| +|UUID_STRING|✅| +|VALIDATE|❓| +|VALIDATE_PIPE_LOAD|❓| +|VAR_POP|❓| +|VAR_SAMP|❓| +|VARIANCE, VARIANCE_SAMP|❓| +|VARIANCE_POP|❓| +|VECTOR_COSINE_SIMILARITY|❓| +|VECTOR_INNER_PRODUCT|❓| +|VECTOR_L1_DISTANCE|❓| +|VECTOR_L2_DISTANCE|❓| +|WAREHOUSE_LOAD_HISTORY|❓| 
+|WAREHOUSE_METERING_HISTORY|❓| +|WIDTH_BUCKET|❓| +|XMLGET|❓| +|YEAR* / DAY* / WEEK* / MONTH / QUARTER|❓| +|ZEROIFNULL|✅| +|ZIPF|❓| diff --git a/src/content/docs/snowflake/tooling/user-interface.md b/src/content/docs/snowflake/tooling/user-interface.md new file mode 100644 index 00000000..011a0b41 --- /dev/null +++ b/src/content/docs/snowflake/tooling/user-interface.md @@ -0,0 +1,54 @@ +--- +title: User Interface +description: Get started with LocalStack for Snowflake Web User Interface +--- + +## Introduction + +The Snowflake emulator provides a User Interface (UI) via the [LocalStack Web Application](https://app.localstack.cloud/). The User Interface allows you to: + +* Run SQL queries and view results using a Query Editor. +* View detailed request/response traces of API calls. +* Forward queries to a real Snowflake instance using a proxy. + +To access the User Interface, you need to start the Snowflake emulator and access the **Snowflake** tab in your default instance of the LocalStack Web Application. This User Interface is available only when the Snowflake emulator is running. Please note that it does not connect to the real Snowflake cloud environment (except during a proxy connection) or any other external service on the Internet. + +:::note +Please note that the Snowflake User Interface is still experimental and under active development. +::: + +## Getting started + +This guide is designed for users new to the Snowflake emulator Web UI. Start your Snowflake emulator using the following command: + +```bash +IMAGE_NAME=localstack/snowflake localstack start +``` + +Navigate to [**https://app.localstack.cloud/inst/default/snowflake**](https://app.localstack.cloud/inst/default/snowflake) to access the User Interface. + +### Run SQL queries + +The User Interface provides a **Worksheet** tab that allows you to run SQL queries and view the results. 
+ +![Running SQL queries](/images/snowflake/run-sql-queries-web-ui.png) + +You can click on **Warehouses** and **Databases** on the right side of the worksheet to view the available warehouses and databases. + +### View Snowflake logs + +The User Interface provides a **Logs** tab that provides you a detailed view of request/response traces of API calls issued against the local emulator. You can view the request and response headers, body, and status code. + +![View request/response traces](/images/snowflake/request-logs-web-ui.png) + +### Proxy to a real Snowflake instance + +You can forward queries from the Snowflake emulator to a real Snowflake instance using a proxy. + +The User Interface provides a **Proxy** tab that allows you to enter your Snowflake account credentials. Click on the **Save** button to save the credentials. You can now run queries in the Query Editor, and they will be forwarded to the real Snowflake instance. + +:::danger +Be careful when operating the proxy, as it can incur costs and access data in your real Snowflake account. For security reasons, please make sure to use scoped credentials with the least set of required permissions (ideally read-only). Only run the proxy against test/staging environments, and never against a production database. +::: + +![Forward queries to a real Snowflake instance](/images/snowflake/proxy-web-ui.png) diff --git a/src/content/docs/snowflake/tooling/web-user-interface.md b/src/content/docs/snowflake/tooling/web-user-interface.md deleted file mode 100644 index a5471e85..00000000 --- a/src/content/docs/snowflake/tooling/web-user-interface.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -title: Web UI -description: Get started with LocalStack for Snowflake Web User Interface ---- - - - -## Introduction - -The Snowflake emulator provides a simple web user interface (UI) accessible via a web browser. The Web UI allows you to: - -* Run SQL queries and view results using a Query Editor. 
-* View detailed request/response traces of API calls. -* Forward queries to a real Snowflake instance using a proxy. - -Access the Web UI at [https://snowflake.localhost.localstack.cloud/](https://snowflake.localhost.localstack.cloud/). The Web UI is available only when the Snowflake emulator is running. It does not connect to the Snowflake service (except during a proxy connection) or any other external service on the Internet. - -{{< alert title="Info" >}} -Please note that the LocalStack Snowflake Web UI is still experimental and under active development. -{{< /alert >}} - -## Getting started - -This guide is designed for users new to the Snowflake emulator Web UI. Start your Snowflake emulator using the following command: - -```bash -EXTRA_CORS_ALLOWED_ORIGINS='*' IMAGE_NAME=localstack/snowflake DEBUG=1 localstack start -``` - -The `EXTRA_CORS_ALLOWED_ORIGINS` environment variable is used to allow CORS requests from the Web UI. Navigate to [https://snowflake.localhost.localstack.cloud/](https://snowflake.localhost.localstack.cloud/) to access the Web UI. - -### Run SQL queries - -The Web UI provides a Query Editor that allows you to run SQL queries and view the results. - -Running SQL queries -

- -You can click on **Warehouses** and **Databases** on the left side of the screen to view the available warehouses and databases. - -### View request/response traces - -The Web UI provides a detailed view of request/response traces of API calls. You can view the request and response headers, body, and status code. Click on the **Request Logs** tab in the Web UI to view the request/response traces. - -View request/response traces - -### Forward queries to a real Snowflake instance - -You can forward queries from the Snowflake emulator to a real Snowflake instance using a proxy. - -To forward queries, click on the **Proxy** tab in the Web UI and enter the Snowflake account username & password. Click on the **Save** button to save the credentials. You can now run queries in the Query Editor, and they will be forwarded to the real Snowflake instance. -{{< alert title="Important" color="danger" >}} -Be careful when operating the proxy, as it can incur costs and access data in your real Snowflake account. For security reasons, please make sure to use scoped credentials with the least set of required permissions (ideally read-only). Only run the proxy against test/staging environments, and never against a production database. 
-{{< /alert >}} -Forward queries to a real Snowflake instance \ No newline at end of file diff --git a/src/content/docs/snowflake/tutorials/aws-lambda-localstack-snowpark.md b/src/content/docs/snowflake/tutorials/aws-lambda-localstack-snowpark.md index fc717568..91ddcfa1 100644 --- a/src/content/docs/snowflake/tutorials/aws-lambda-localstack-snowpark.md +++ b/src/content/docs/snowflake/tutorials/aws-lambda-localstack-snowpark.md @@ -20,7 +20,7 @@ The code in this tutorial is available on [GitHub](https://github.com/localstack ## Prerequisites - [`localstack` CLI](https://docs.localstack.cloud/getting-started/installation/#localstack-cli) with a [`LOCALSTACK_AUTH_TOKEN`](https://docs.localstack.cloud/getting-started/auth-token/) -- [LocalStack for Snowflake]({{< ref "installation" >}}) +- [LocalStack for Snowflake](/snowflake/getting-started/) - [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html) & [`awslocal` wrapper](https://docs.localstack.cloud/user-guide/integrations/aws-cli/#localstack-aws-cli-awslocal) - Python 3.10 installed locally @@ -28,10 +28,10 @@ The code in this tutorial is available on [GitHub](https://github.com/localstack Create a new directory for your lambda function and navigate to it: -{{< command >}} -$ mkdir -p lambda-snowpark -$ cd lambda-snowpark -{{< / command >}} +```bash +mkdir -p lambda-snowpark +cd lambda-snowpark +``` Create a new file named `handler.py` and add the following code: @@ -112,24 +112,24 @@ You can now install the dependencies for your Lambda function. These include: Run the following command: -{{< command >}} -$ pip3 install \ +```bash +pip3 install \ --platform manylinux2010_x86_64 \ --implementation cp \ --only-binary=:all: --upgrade \ --target ./libs \ snowflake-connector-python==2.7.9 boto3==1.26.153 botocore==1.29.153 -{{< / command >}} +``` ## Package the Lambda function Package the Lambda function and its dependencies into a ZIP file. 
Run the following command: -{{< command >}} -$ mkdir -p build -$ cp -r libs/* build/ -$ (cd build && zip -q -r function-py.zip .) -{{< / command >}} +```bash +mkdir -p build +cp -r libs/* build/ +(cd build && zip -q -r function-py.zip .) +``` You have now created a ZIP file named `function-py.zip` that contains the Lambda function and its dependencies. @@ -137,13 +137,13 @@ You have now created a ZIP file named `function-py.zip` that contains the Lambda Start your LocalStack container in your preferred terminal/shell. -{{< command >}} -$ export LOCALSTACK_AUTH_TOKEN= -$ DEBUG=1 \ +```bash +export LOCALSTACK_AUTH_TOKEN= +DEBUG=1 \ LAMBDA_RUNTIME_ENVIRONMENT_TIMEOUT=180 \ IMAGE_NAME=localstack/snowflake \ localstack start -{{< / command >}} +``` > The `DEBUG=1` environment variable is set to enable debug logs. It would allow you to see the SQL queries executed by the Lambda function. The `LAMBDA_RUNTIME_ENVIRONMENT_TIMEOUT` environment variable is set to increase the Lambda function's timeout to 180 seconds. @@ -151,22 +151,22 @@ $ DEBUG=1 \ You can now deploy the Lambda function to LocalStack using the `awslocal` CLI. Run the following command: -{{< command >}} -$ awslocal lambda create-function \ +```bash +awslocal lambda create-function \ --function-name localstack-snowflake-lambda-example \ --runtime python3.10 \ --timeout 180 \ --zip-file fileb://build/function-py.zip \ --handler handler.lambda_handler \ --role arn:aws:iam::000000000000:role/lambda-role -{{< / command >}} +``` After successfully deploying the Lambda function, you will receive a response with the details of the function. You can now invoke the function using the `awslocal` CLI: -{{< command >}} -$ awslocal lambda invoke --function-name localstack-snowflake-lambda-example \ +```bash +awslocal lambda invoke --function-name localstack-snowflake-lambda-example \ --payload '{"body": "test" }' output.txt -{{< / command >}} +``` You will receive a response with the details of the invocation. 
You can view the output in the `output.txt` file. To see the SQL queries executed by the Lambda function, check the logs by navigating to LocalStack logs (`localstack logs`). diff --git a/src/content/docs/snowflake/tutorials/credit-scoring-with-localstack-snowpark.md b/src/content/docs/snowflake/tutorials/credit-scoring-with-localstack-snowpark.md index d1a0cba8..07457214 100644 --- a/src/content/docs/snowflake/tutorials/credit-scoring-with-localstack-snowpark.md +++ b/src/content/docs/snowflake/tutorials/credit-scoring-with-localstack-snowpark.md @@ -15,8 +15,8 @@ The Jupyter Notebook and the dataset used in this tutorial are available on [Git ## Prerequisites - [`localstack` CLI](https://docs.localstack.cloud/getting-started/installation/#localstack-cli) with a [`LOCALSTACK_AUTH_TOKEN`](https://docs.localstack.cloud/getting-started/auth-token/) -- [LocalStack for Snowflake]({{< ref "installation" >}}) -- [Snowpark]({{< ref "snowpark" >}}) with other Python libraries +- [LocalStack for Snowflake](/snowflake/getting-started/) +- [Snowpark](/snowflake/integrations/snowpark) with other Python libraries - [Jupyter Notebook](https://jupyter.org/install#jupyter-notebook) You should also download [`credit_files.csv`](https://github.com/localstack-samples/localstack-snowflake-samples/blob/main/credit-scoring-with-snowpark/credit_files.csv) and [`credit_request.csv`](https://github.com/localstack-samples/localstack-snowflake-samples/blob/main/credit-scoring-with-snowpark/credit_request.csv) files from the LocalStack repository. The files should be present in the same directory as your Jupyter Notebook. @@ -25,10 +25,10 @@ You should also download [`credit_files.csv`](https://github.com/localstack-samp Start your LocalStack container in your preferred terminal/shell. 
-{{< command >}} -$ export LOCALSTACK_AUTH_TOKEN= -$ IMAGE_NAME=localstack/snowflake localstack start -{{< / command >}} +```bash +export LOCALSTACK_AUTH_TOKEN= +IMAGE_NAME=localstack/snowflake localstack start +``` ## Create a Snowpark session @@ -126,7 +126,7 @@ credit_df.toPandas().hist(figsize=(15,15)) The following output is displayed:

-credit_df_hist +![credit_df_hist](/images/snowflake/credit_df_hist.png)

You can also visualize the categorical features of the `credit_df` table: @@ -155,8 +155,8 @@ plt.show() The following output is displayed:

-credit_df_cat +![credit_df_cat](/images/snowflake/credit_df_cat.png) ## Conclusion -You can now perform further experimentations with the Snowflake emulator. For example, you can use the Snowpark API to run queries to get various insights, such as determining the range of loans per different category. \ No newline at end of file +You can now perform further experimentations with the Snowflake emulator. For example, you can use the Snowpark API to run queries to get various insights, such as determining the range of loans per different category. diff --git a/src/content/docs/snowflake/tutorials/index.md b/src/content/docs/snowflake/tutorials/index.md deleted file mode 100644 index fcb647e0..00000000 --- a/src/content/docs/snowflake/tutorials/index.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: Tutorials -description: This is a dummy description. -template: doc ---- - -Welcome to our Snowflake tutorials! \ No newline at end of file