Merge branch 'main' into databricks/create-example
tatiana committed May 30, 2023
2 parents b1623a3 + 6ed6ed9 commit cee0b28
Showing 252 changed files with 2,535 additions and 15,471 deletions.
1 change: 0 additions & 1 deletion .deepsource.toml
@@ -4,7 +4,6 @@ test_patterns = [
"python-sdk/tests/**",
"python-sdk/tests_integration/**",
"python-sdk/test-docker/**",
"sql-cli/tests/**"
]

[[analyzers]]
30 changes: 30 additions & 0 deletions .github/ci-test-connections.yaml
@@ -82,6 +82,17 @@ connections:
port: 5439
login: $REDSHIFT_USERNAME
password: $REDSHIFT_PASSWORD
- conn_id: redshift_default
conn_type: redshift
port: 5439
extra:
iam: false
cluster_identifier: <REDSHIFT_CLUSTER_IDENTIFIER>
profile: default
db_user: awsuser
port: 5439
database: dev
region: 'us-east-1'
- conn_id: s3_conn_benchmark
conn_type: aws
description: null
@@ -99,6 +110,12 @@ connections:
description: null
extra:
connection_string: $AZURE_WASB_CONN_STRING
- conn_id: wasb_conn_with_access_key
conn_type: wasb
host: astrosdk.blob.core.windows.net
description: null
extra:
shared_access_key: $AZURE_WASB_ACCESS_KEY
- conn_id: gcp_conn_project
conn_type: google_cloud_platform
description: null
@@ -132,10 +149,23 @@ connections:
schema:
login:
password:
- conn_id: duckdb_memory
conn_type: duckdb
host:
schema:
login:
password:
- conn_id: minio_conn
conn_type: aws
description: null
extra:
aws_access_key_id: "ROOTNAME"
aws_secret_access_key: "CHANGEME123"
endpoint_url: "http://127.0.0.1:9000"
- conn_id: mysql_conn
conn_type: mysql
host: $MYSQL_HOST
login: $MYSQL_LOGIN
password: $MYSQL_PASSWORD
port: 3306
schema: $MYSQL_DB
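
The connections above are referenced by conn_id from the SDK's tests and example DAGs. As a rough sketch (not part of this commit; the S3 path and table name are hypothetical), a task built with the astro Python SDK would consume the new duckdb_memory and minio_conn entries like this:

# Sketch only: shows how the conn_ids defined above are typically used.
# The bucket and table names are hypothetical.
from astro import sql as aql
from astro.files import File
from astro.table import Table

# Inside a DAG definition: read a CSV via the local MinIO endpoint
# (minio_conn is an aws-type connection with endpoint_url overridden)
# and load it into an in-memory DuckDB table for fast test runs.
load_to_duckdb = aql.load_file(
    input_file=File(path="s3://test-bucket/data.csv", conn_id="minio_conn"),
    output_table=Table(name="sample_table", conn_id="duckdb_memory"),
)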
2 changes: 0 additions & 2 deletions .github/labeler.yml

This file was deleted.

20 changes: 20 additions & 0 deletions .github/workflows/ci-astro-deploy.yml
@@ -0,0 +1,20 @@
name: "Astro Deploy"
on:
schedule:
- cron: '0 0 * * *'

jobs:
deploy:
env:
ASTRO_DOCKER_REGISTRY: ${{ secrets.ASTRO_DOCKER_REGISTRY }}
ASTRO_ORGANIZATION_ID: ${{ secrets.ASTRO_ORGANIZATION_ID }}
ASTRO_DEPLOYMENT_ID: ${{ secrets.ASTRO_DEPLOYMENT_ID }}
ASTRO_KEY_ID: ${{ secrets.ASTRO_KEY_ID }}
ASTRO_KEY_SECRET: ${{ secrets.ASTRO_KEY_SECRET }}
ASTRO_DEPLOYMENT_ID_SINGLE_WORKER: ${{ secrets.ASTRO_DEPLOYMENT_ID_SINGLE_WORKER }}
ASTRO_KEY_ID_SINGLE_WORKER: ${{ secrets.ASTRO_KEY_ID_SINGLE_WORKER }}
ASTRO_KEY_SECRET_SINGLE_WORKER: ${{ secrets.ASTRO_KEY_SECRET_SINGLE_WORKER }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- run: cd python-sdk/tests_integration/astro_deploy && bash deploy.sh $ASTRO_DOCKER_REGISTRY $ASTRO_ORGANIZATION_ID $ASTRO_DEPLOYMENT_ID $ASTRO_KEY_ID $ASTRO_KEY_SECRET $ASTRO_DEPLOYMENT_ID_SINGLE_WORKER $ASTRO_KEY_ID_SINGLE_WORKER $ASTRO_KEY_SECRET_SINGLE_WORKER
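
The run step above passes each secret positionally to deploy.sh. For local debugging, a rough Python equivalent (a sketch assuming the same eight variables are exported in your shell; deploy.sh itself is not part of this diff) is:

import os
import subprocess

# Names mirror the workflow's env block, in the order the run step passes them.
ARG_NAMES = [
    "ASTRO_DOCKER_REGISTRY",
    "ASTRO_ORGANIZATION_ID",
    "ASTRO_DEPLOYMENT_ID",
    "ASTRO_KEY_ID",
    "ASTRO_KEY_SECRET",
    "ASTRO_DEPLOYMENT_ID_SINGLE_WORKER",
    "ASTRO_KEY_ID_SINGLE_WORKER",
    "ASTRO_KEY_SECRET_SINGLE_WORKER",
]

subprocess.run(
    ["bash", "deploy.sh", *(os.environ[n] for n in ARG_NAMES)],
    cwd="python-sdk/tests_integration/astro_deploy",
    check=True,  # surface a non-zero exit, as a failing workflow step would
)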
18 changes: 0 additions & 18 deletions .github/workflows/ci-benchmark.yaml
@@ -5,7 +5,6 @@ on:
- labeled
schedule:
- cron: '0 0 * * 1'
- cron: '0 0 * * *'

# This allows a subsequently queued workflow run to interrupt and cancel previous runs
concurrency:
@@ -65,20 +64,3 @@ jobs:
( echo "cat <<EOF >python-sdk/test-connections.yaml"; cat .github/ci-test-connections.yaml; ) >python-sdk/test-connections.yaml && . python-sdk/test-connections.yaml
python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
cd python-sdk/tests/benchmark && make
Astro-deploy:
if: >-
(github.event_name == 'schedule' && github.event.schedule != '0 0 * * 1')
env:
ASTRO_DOCKER_REGISTRY: ${{ secrets.ASTRO_DOCKER_REGISTRY }}
ASTRO_ORGANIZATION_ID: ${{ secrets.ASTRO_ORGANIZATION_ID }}
ASTRO_DEPLOYMENT_ID: ${{ secrets.ASTRO_DEPLOYMENT_ID }}
ASTRO_KEY_ID: ${{ secrets.ASTRO_KEY_ID }}
ASTRO_KEY_SECRET: ${{ secrets.ASTRO_KEY_SECRET }}
ASTRO_DEPLOYMENT_ID_SINGLE_WORKER: ${{ secrets.ASTRO_DEPLOYMENT_ID_SINGLE_WORKER }}
ASTRO_KEY_ID_SINGLE_WORKER: ${{ secrets.ASTRO_KEY_ID_SINGLE_WORKER }}
ASTRO_KEY_SECRET_SINGLE_WORKER: ${{ secrets.ASTRO_KEY_SECRET_SINGLE_WORKER }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- run: cd python-sdk/tests_integration/astro_deploy && sh deploy.sh $ASTRO_DOCKER_REGISTRY $ASTRO_ORGANIZATION_ID $ASTRO_DEPLOYMENT_ID $ASTRO_KEY_ID $ASTRO_KEY_SECRET $ASTRO_DEPLOYMENT_ID_SINGLE_WORKER $ASTRO_KEY_ID_SINGLE_WORKER $ASTRO_KEY_SECRET_SINGLE_WORKER
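
The one-liner kept in the benchmark job above is a shell templating trick: it wraps ci-test-connections.yaml in a "cat <<EOF" heredoc and sources the result, so placeholders such as $REDSHIFT_USERNAME are expanded from the environment in place. A Python sketch of the same substitution (covering the plain $VAR placeholders used here; file paths as in the workflow):

import os

# Expand $VAR placeholders in the connections template from the current
# environment. Unlike the heredoc, os.path.expandvars leaves unknown
# variables untouched instead of expanding them to empty strings.
with open(".github/ci-test-connections.yaml") as src:
    template = src.read()

with open("python-sdk/test-connections.yaml", "w") as dst:
    dst.write(os.path.expandvars(template))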
19 changes: 14 additions & 5 deletions .github/workflows/ci-python-sdk.yaml
@@ -51,6 +51,7 @@ env:
SFTP_PASSWORD: ${{ secrets.SFTP_PASSWORD }}
AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS: True
AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
AIRFLOW_VAR_FOO: templated_file_name
AWS_BUCKET: tmp9
@@ -60,6 +61,10 @@ env:
MSSQL_HOST: ${{ secrets.MSSQL_HOST }}
MSSQL_LOGIN: ${{ secrets.MSSQL_LOGIN }}
MSSQL_PASSWORD: ${{ secrets.MSSQL_PASSWORD }}
MYSQL_DB: ${{ secrets.MYSQL_DB }}
MYSQL_HOST: ${{ secrets.MYSQL_HOST }}
MYSQL_LOGIN: ${{ secrets.MYSQL_LOGIN }}
MYSQL_PASSWORD: ${{ secrets.MYSQL_PASSWORD }}

jobs:
Markdown-link-check:
@@ -196,6 +201,7 @@ jobs:
DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}

Run-Unit-tests-Airflow-2-5:
strategy:
@@ -236,7 +242,7 @@ jobs:
key: ${{ runner.os }}-2.5-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
- run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
- run: pip3 install nox
- run: nox -s "test-${{ matrix.version }}(airflow='2.5.0')" -- tests/ --cov=src --cov-report=xml --cov-branch
- run: nox -s "test-${{ matrix.version }}(airflow='2.6.0')" -- tests/ --cov=src --cov-report=xml --cov-branch
- name: Upload coverage
uses: actions/upload-artifact@v2
with:
@@ -313,7 +319,7 @@ jobs:
- run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
- run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
- run: pip3 install nox
- run: nox -s "test-3.8(airflow='2.5.0')" -- tests_integration/ -k "test_load_file.py" --splits 3 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
- run: nox -s "test-3.8(airflow='2.6.0')" -- tests_integration/ -k "test_load_file.py" --splits 3 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
- run: cat /tmp/durations-${{ matrix.group }}
- name: Upload coverage
uses: actions/upload-artifact@v2
@@ -409,7 +415,7 @@ jobs:
- run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
- run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
- run: pip3 install nox
- run: nox -s "test-3.8(airflow='2.5.0')" -- tests_integration/ -k "test_example_dags.py" --splits 3 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
- run: nox -s "test-3.8(airflow='2.6.0')" -- tests_integration/ -k "test_example_dags.py" --splits 3 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
- run: cat /tmp/durations-${{ matrix.group }}
- name: Upload coverage
uses: actions/upload-artifact@v2
@@ -505,7 +511,7 @@ jobs:
- run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
- run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
- run: pip3 install nox
- run: nox -s "test-3.8(airflow='2.5.0')" -- tests_integration/ -k "not test_load_file.py and not test_example_dags.py" --splits 11 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
- run: nox -s "test-3.8(airflow='2.6.0')" -- tests_integration/ -k "not test_load_file.py and not test_example_dags.py" --splits 11 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
- run: cat /tmp/durations-${{ matrix.group }}
- name: Upload coverage
uses: actions/upload-artifact@v2
@@ -530,6 +536,7 @@ jobs:
DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}

Run-example-dag-tests-Airflow-2-2-5:
if: >-
@@ -612,14 +619,15 @@ jobs:
DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}

Generate-Constraints:
if: (github.event_name == 'release' || github.event_name == 'push')
strategy:
fail-fast: false
matrix:
python: [ '3.7', '3.8', '3.9', '3.10' ]
airflow: [ '2.2.5', '2.3.4', '2.4.2', '2.5.1' ]
airflow: [ '2.2.5', '2.3.4', '2.4.2', '2.5.3', '2.6.0' ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -679,6 +687,7 @@ jobs:
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
flags: PythonSDK
files: ./python-sdk/coverage.xml

collect-durations:
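
The airflow version bumps from '2.5.0' to '2.6.0' in this file select a different parametrized nox session. Session IDs of the form test-3.8(airflow='2.6.0') come from nox's parametrize decorator; a minimal sketch of the pattern (the repository's actual noxfile.py may install different extras):

import nox

# Produces session IDs like "test-3.8(airflow='2.6.0')".
@nox.session(python=["3.7", "3.8", "3.9", "3.10"])
@nox.parametrize("airflow", ["2.2.5", "2.6.0"])
def test(session: nox.Session, airflow: str) -> None:
    session.install(f"apache-airflow=={airflow}")
    session.install("-e", ".")  # hypothetical: install the package under test
    session.run("pytest", *session.posargs)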
