Fix bugs and implement UI
cw75 committed May 30, 2023
2 parents d8de6ba + 70d3f74 commit 76298af
Showing 547 changed files with 38,559 additions and 50,971 deletions.
10 changes: 10 additions & 0 deletions .github/actions/setup-server/action.yml
@@ -1,5 +1,9 @@
name: Setup Server
description: "Sets up a server and installs the appropriate environment."
inputs:
  python-pkg-version:
    description: "Specifies test Pypi package version number if not using the latest release."
    required: false

runs:
  using: composite
@@ -56,6 +60,12 @@ runs:
      shell: bash
      run: python3 scripts/install_local.py -g -s -e

    - name: Update Pypi package version
      shell: bash
      run: |
        if [ "${{ inputs.python-pkg-version }}" ]; then echo -e "\nversionTag: ${{ inputs.python-pkg-version }}" >> $HOME/.aqueduct/server/config/config.yml; fi
        cat $HOME/.aqueduct/server/config/config.yml
    - name: Start the server again
      shell: bash
      run: (aqueduct start --disable-usage-stats --verbose > $SERVER_LOGS_FILE 2>&1 &)
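For context, the optional input above lets CI point a server at a pre-release build: when python-pkg-version is set, the step appends a versionTag line to the server config, presumably so the server picks up that Test PyPI build. A minimal bash sketch of the append logic (the config path comes from the step itself; the sample version is made up):

    # The [ "$VAR" ] test is false for an empty or unset value, so nothing
    # is appended when no version override is provided.
    PKG_VERSION="0.1.dev1685480000"                    # hypothetical version
    CONFIG="$HOME/.aqueduct/server/config/config.yml"
    if [ "$PKG_VERSION" ]; then
      echo -e "\nversionTag: $PKG_VERSION" >> "$CONFIG"
    fi
    cat "$CONFIG"   # last line now reads: versionTag: 0.1.dev1685480000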
142 changes: 142 additions & 0 deletions .github/workflows/airflow-periodic-tests.yml
@@ -0,0 +1,142 @@
name: Airflow Periodic Integration Tests

on:
  schedule:
    - cron: "0 21 * * 4" # Run at 9PM PST on every Thursday
  workflow_dispatch:

jobs:
  publish-pypi:
    # similar to release scripts, but publish to test pypi:
    #   rm -rf dist && rm -rf build
    #   python3 -m build && twine check dist/*
    #   twine upload --repository testpypi dist/*
    #   pip3 install -i https://test.pypi.org/simple/ aqueduct-ml
    name: Publish Test Pypi Packages
    runs-on: [ubuntu-latest]
    timeout-minutes: 20
    outputs:
      version: ${{ steps.inject_version.outputs.version }}
    permissions:
      id-token: write
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      - name: install missing dependencies
        run: pip install build twine

      - name: inject dev version
        id: inject_version
        run: |
          export VERSION=0.1.dev$(date +%s)
          rm sdk/version
          echo $VERSION >> sdk/version
          rm src/python/version
          echo $VERSION >> src/python/version
          echo version=$VERSION >> $GITHUB_OUTPUT
      - name: build sdk
        working-directory: sdk
        run: |
          rm -rf dist
          rm -rf build
          python3 -m build
          twine check dist/*
      - name: publish sdk
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          repository-url: https://test.pypi.org/legacy/
          packages-dir: sdk/dist
          password: ${{ secrets.PYPI_API_TOKEN_SDK }}

      - name: build executor
        working-directory: src/python
        run: |
          rm -rf dist
          rm -rf build
          python3 -m build
          twine check dist/*
      - name: publish executor
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          repository-url: https://test.pypi.org/legacy/
          packages-dir: src/python/dist
          password: ${{ secrets.PYPI_API_TOKEN_EXECUTOR }}

  run-airflow-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 480
    needs: publish-pypi
    name: SDK Integration Tests against Airflow Compute
    steps:
      - uses: actions/checkout@v2

      - uses: ./.github/actions/setup-server
        timeout-minutes: 7
        with:
          python-pkg-version: ${{ needs.publish-pypi.outputs.version }}

      # TODO(ENG-2537): Use our separate GH actions credentials.
      - uses: ./.github/actions/fetch-test-config
        with:
          aws_access_key_id: ${{ secrets.KENNY_AWS_ACCESS_KEY_ID }}
          aws_secret_access_key: ${{ secrets.KENNY_AWS_SECRET_ACCESS_KEY }}
          s3_test_config_path: periodic-airflow-test-config.yml

      - name: Start Airflow cluster
        working-directory: scripts/compute
        run: ./airflow_test_setup.sh &

      - name: Wait for Airflow cluster to be ready
        run: while ! echo exit | nc localhost 8000; do sleep 1; done

      - name: Get Airflow password
        run: |
          airflow_password=$(cat ~/airflow/standalone_admin_password.txt)
          echo "airflow_password=$airflow_password" >> $GITHUB_ENV
      - name: Update Airflow password in test-credentials.yml
        working-directory: integration_tests/sdk
        # NOTE (Saurav): This replaces all password fields under the compute section.
        # This is safe to do because only the test_airflow compute is being used
        # in this test. Otherwise the logic gets more complicated.
        run: sed -i '/^compute:/,/^[^ ]/ s/^\(\s*password:\s*\).*/\1${{ env.airflow_password }}/' test-credentials.yml

      - name: Install any data connector packages
        run: |
          aqueduct install s3
          aqueduct install snowflake
      - name: Run the SDK Integration Tests
        working-directory: integration_tests/sdk
        run: pytest aqueduct_tests/ -rP -vv -n 1

      - uses: ./.github/actions/upload-artifacts
        if: always()
        with:
          prefix: Airflow Compute

      # Sets it as an environmental variable.
      # - name: Get the Slack ID for the current oncall
      #   if: always()
      #   run: |
      #     aws s3 cp s3://aqueduct-assets/oncall.yml ./oncall.yml
      #     echo "ONCALL_SLACK_MEMBER_ID=$(python3 scripts/get_current_oncall.py --file ./oncall.yml)" >> $GITHUB_ENV

      # - name: Report to Slack on Failure
      #   if: always()
      #   uses: ravsamhq/notify-slack-action@v1
      #   with:
      #     status: ${{ job.status }}
      #     notification_title: ""
      #     message_format: "{emoji} *{workflow}* has {status_message}"
      #     footer: "{run_url}"
      #     notify_when: "failure,warnings"
      #     mention_users: ${{ env.ONCALL_SLACK_MEMBER_ID }}
      #   env:
      #     SLACK_WEBHOOK_URL: ${{ secrets.ACTION_MONITORING_SLACK }}
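The sed one-liner in the "Update Airflow password in test-credentials.yml" step above is dense enough to deserve unpacking. Here is a sketch of its effect on a hypothetical test-credentials.yml; the file's real layout is not part of this diff, so the surrounding keys are assumptions:

    # Hypothetical test-credentials.yml before the step runs:
    #   compute:
    #     test_airflow:
    #       password: placeholder
    #   data:
    #     test_snowflake:
    #       password: untouched
    #
    # The address range /^compute:/,/^[^ ]/ limits the substitution to the
    # lines from "compute:" up to the next non-indented top-level key, so
    # only password fields under the compute section are rewritten:
    sed -i '/^compute:/,/^[^ ]/ s/^\(\s*password:\s*\).*/\1NEW_PASSWORD/' test-credentials.yml
    # Afterwards compute.test_airflow.password reads NEW_PASSWORD, while
    # data.test_snowflake.password is left alone. (\s is a GNU sed extension,
    # available on the ubuntu-latest runners used here.)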
2 changes: 1 addition & 1 deletion .github/workflows/integration-tests.yml
@@ -113,7 +113,7 @@ jobs:
      - name: Run the SDK Data Integration Tests
        timeout-minutes: 20
        working-directory: integration_tests/sdk
-       run: python3 run_tests.py --data-integration -n 2
+       run: python3 run_tests.py --data -n 2

      - uses: ./.github/actions/upload-artifacts
        if: always()
82 changes: 75 additions & 7 deletions .github/workflows/periodic-integration-tests.yml
@@ -6,15 +6,80 @@ on:
  workflow_dispatch:

jobs:
  publish-pypi:
    # similar to release scripts, but publish to test pypi:
    #   rm -rf dist && rm -rf build
    #   python3 -m build && twine check dist/*
    #   twine upload --repository testpypi dist/*
    #   pip3 install -i https://test.pypi.org/simple/ aqueduct-ml
    name: Publish Test Pypi Packages
    runs-on: [ubuntu-latest]
    timeout-minutes: 20
    outputs:
      version: ${{ steps.inject_version.outputs.version }}
    permissions:
      id-token: write
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      - name: install missing dependencies
        run: pip install build twine

      - name: inject dev version
        id: inject_version
        run: |
          export VERSION=0.1.dev$(date +%s)
          rm sdk/version
          echo $VERSION >> sdk/version
          rm src/python/version
          echo $VERSION >> src/python/version
          echo version=$VERSION >> $GITHUB_OUTPUT
      - name: build sdk
        working-directory: sdk
        run: |
          rm -rf dist
          rm -rf build
          python3 -m build
          twine check dist/*
      - name: publish sdk
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          repository-url: https://test.pypi.org/legacy/
          packages-dir: sdk/dist
          password: ${{ secrets.PYPI_API_TOKEN_SDK }}

      - name: build executor
        working-directory: src/python
        run: |
          rm -rf dist
          rm -rf build
          python3 -m build
          twine check dist/*
      - name: publish executor
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          repository-url: https://test.pypi.org/legacy/
          packages-dir: src/python/dist
          password: ${{ secrets.PYPI_API_TOKEN_EXECUTOR }}

  run-k8s-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 360
    name: SDK Integration Tests against K8s Compute
    needs: publish-pypi
    steps:
      - uses: actions/checkout@v2

      - uses: ./.github/actions/setup-server
        timeout-minutes: 7
        with:
          python-pkg-version: ${{ needs.publish-pypi.outputs.version }}

      # TODO(ENG-2537): Use our separate GH actions credentials.
      - uses: ./.github/actions/fetch-test-config
@@ -100,13 +165,16 @@ jobs:

  run-tests-conda:
    runs-on: ubuntu-latest-4-cores
-   timeout-minutes: 40
+   timeout-minutes: 50
    needs: publish-pypi
    name: All Integration Tests with Conda
    steps:
      - uses: actions/checkout@v2

      - uses: ./.github/actions/setup-server
        timeout-minutes: 7
        with:
          python-pkg-version: ${{ needs.publish-pypi.outputs.version }}

      - uses: conda-incubator/setup-miniconda@v2
        with:
@@ -128,15 +196,15 @@ jobs:
          s3_test_config_path: periodic-conda-test-config.yml

      - name: Run the SDK Integration Tests
-       timeout-minutes: 30
+       timeout-minutes: 40
        working-directory: integration_tests/sdk
        run: python3 run_tests.py -n 8

      - name: Set the API key as an env variable.
        run: echo "API_KEY=$(aqueduct apikey)" >> $GITHUB_ENV

      - name: Run the No-Concurrency Integration Tests
-       timeout-minutes: 10
+       timeout-minutes: 15
        working-directory: integration_tests/no_concurrency
        env:
          SERVER_ADDRESS: localhost:8080
@@ -148,7 +216,7 @@ jobs:
        with:
          prefix: Conda

- run-data-integration-tests:
+ run-data-resource-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 60
    name: SDK Integration Tests against Data Connectors
@@ -217,21 +285,21 @@ jobs:
          aqueduct install redshift
      - name: Setup Hosted Data Integrations
-       timeout-minutes: 20
+       timeout-minutes: 25
        working-directory: scripts/data
        run: python3 setup_hosted.py --aws-key-id ${{ secrets.SAURAV_AWS_ACCESS_KEY_ID }} --aws-secret-key ${{ secrets.SAURAV_AWS_SECRET_ACCESS_KEY }}

      - name: Run the SDK Data Integration Tests
        working-directory: integration_tests/sdk
-       run: python3 run_tests.py --data-integration -n 2
+       run: python3 run_tests.py --data -n 2

      - uses: ./.github/actions/upload-artifacts
        if: always()
        with:
          prefix: Data Connectors

      - name: Teardown Hosted Data Integrations
-       timeout-minutes: 20
+       timeout-minutes: 25
        if: always()
        working-directory: scripts/data
        run: python3 teardown_hosted.py --aws-key-id ${{ secrets.SAURAV_AWS_ACCESS_KEY_ID }} --aws-secret-key ${{ secrets.SAURAV_AWS_SECRET_ACCESS_KEY }}
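Both periodic workflows now share the same "inject dev version" recipe: 0.1.dev plus a Unix timestamp forms a valid PEP 440 dev release whose suffix is strictly increasing, so each run publishes a version that sorts after the previous one. A local bash sketch with a made-up timestamp:

    # Equivalent to the inject_version step above (rm followed by >> is
    # collapsed to a plain > overwrite here).
    VERSION="0.1.dev$(date +%s)"           # e.g. 0.1.dev1685480000
    echo "$VERSION" > sdk/version          # version stamp for the SDK package
    echo "$VERSION" > src/python/version   # version stamp for the executor
    # In the workflow the value is also exported for dependent jobs:
    #   echo version=$VERSION >> $GITHUB_OUTPUT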
2 changes: 1 addition & 1 deletion .github/workflows/skipped-integration-tests.yml
@@ -24,7 +24,7 @@ jobs:
      - run: |
          exit 0
- run-tests-data-integrations:
+ run-tests-data-resources:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    name: SDK Data Integration Tests