# What?
#
# Tag and release an arbitrary ref. Uploads the build to an internal archive
# for further processing.
#
# How?
#
# After the provided ref is checked out and tested, the package is built and
# uploaded.
#
# When?
#
# Manual trigger.
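#
# A minimal manual-trigger example using the GitHub CLI (the workflow file
# name is an assumption; substitute the actual path under .github/workflows/):
#
#   gh workflow run release-internal.yml \
#     -f version_number=1.0.0b1 \
#     -f ref=main \
#     -f package_test_command='python -c "import dbt.adapters.spark"'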
name: "Release internal patch"
on:
workflow_dispatch:
inputs:
version_number:
description: "The release version number (i.e. 1.0.0b1)"
type: string
required: true
ref:
description: "The ref (sha or branch name) to use"
type: string
default: "main"
required: true
package_test_command:
description: "Package test command"
type: string
default: "python -c \"import dbt.adapters.spark\""
required: true
defaults:
run:
shell: "bash"
env:
PYTHON_TARGET_VERSION: 3.8
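
# Jobs run in sequence: unit tests, then the integration-test matrix, then the
# reusable internal-release workflow.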
jobs:
  run-unit-tests:
    name: "Unit tests"
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: "Check out the repository"
        uses: actions/checkout@v3
        with:
          # check out the ref under test rather than the default branch
          ref: "${{ inputs.ref }}"
      - name: "Set up Python ${{ env.PYTHON_TARGET_VERSION }}"
        uses: actions/setup-python@v4
        with:
          python-version: "${{ env.PYTHON_TARGET_VERSION }}"
      - name: Install python dependencies
        run: |
          sudo apt-get update
          sudo apt-get install libsasl2-dev
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install -r requirements.txt
          python -m pip install -r dev-requirements.txt
          python -m pip install -e .
      - name: Run unit tests
        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit
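
  # Each matrix entry below is a connection profile passed to
  # dagger/run_dbt_spark_tests.py via --profile; the Databricks profiles rely
  # on the secrets wired into the job's env.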
  run-integration-tests:
    name: "${{ matrix.test }}"
    needs: [run-unit-tests]
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        test:
          - "apache_spark"
          - "spark_session"
          - "databricks_sql_endpoint"
          - "databricks_cluster"
          - "databricks_http_cluster"
    env:
      DBT_INVOCATION_ENV: github-actions
      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
      DD_SITE: datadoghq.com
      DD_ENV: ci
      DD_SERVICE: ${{ github.event.repository.name }}
      DBT_DATABRICKS_CLUSTER_NAME: ${{ secrets.DBT_DATABRICKS_CLUSTER_NAME }}
      DBT_DATABRICKS_HOST_NAME: ${{ secrets.DBT_DATABRICKS_HOST_NAME }}
      DBT_DATABRICKS_ENDPOINT: ${{ secrets.DBT_DATABRICKS_ENDPOINT }}
      DBT_DATABRICKS_TOKEN: ${{ secrets.DBT_DATABRICKS_TOKEN }}
      DBT_DATABRICKS_USER: ${{ secrets.DBT_DATABRICKS_USERNAME }}
      DBT_TEST_USER_1: "buildbot+dbt_test_user_1@dbtlabs.com"
      DBT_TEST_USER_2: "buildbot+dbt_test_user_2@dbtlabs.com"
      DBT_TEST_USER_3: "buildbot+dbt_test_user_3@dbtlabs.com"
    steps:
      - name: Check out the repository
        if: github.event_name != 'pull_request_target'
        uses: actions/checkout@v3
        with:
          persist-credentials: false
          # check out the ref under test rather than the default branch
          ref: "${{ inputs.ref }}"
      # explicitly check out the PR branch;
      # this is necessary for the `pull_request_target` event
      - name: Check out the repository (PR)
        if: github.event_name == 'pull_request_target'
        uses: actions/checkout@v3
        with:
          persist-credentials: false
          ref: ${{ github.event.pull_request.head.ref }}
      # the python version used here is not what is used in the tests themselves
      - name: Set up Python for dagger
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"
      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install -r dagger/requirements.txt
      - name: "Run tests for ${{ matrix.test }}"
        run: python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}
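
  # Hand off to the reusable release workflow in dbt-labs/dbt-release, which
  # builds the package and uploads it to the internal archive. Note the pin to
  # the `mp/add-maintainer-workflows` branch rather than a tagged release.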
  invoke-reusable-workflow:
    name: "Build and Release Internally"
    needs: [run-integration-tests]
    uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@mp/add-maintainer-workflows"
    with:
      version_number: "${{ inputs.version_number }}"
      package_test_command: "${{ inputs.package_test_command }}"
      dbms_name: "spark"
      ref: "${{ inputs.ref }}"
    secrets: "inherit"